diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b92bc58be..c7b164be0 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,6 +1,10 @@
name: Hemera Indexer Continuous Integration
-on: [pull_request]
+on:
+ push:
+ branches:
+ - master
+ pull_request:
jobs:
build:
@@ -38,7 +42,7 @@ jobs:
- name: Install dependencies
run: |
pip install --upgrade pip
- pip install poetry
+ pip install poetry==1.6.1
poetry update
poetry install -v
poetry show
@@ -57,7 +61,7 @@ jobs:
- name: Pipeline Test with pytest
run: |
export PYTHONPATH=$(pwd)
- poetry run pytest indexer --cov --cov-report=xml
+ poetry run pytest -vv -m indexer --cov --cov-report=xml
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v5
diff --git a/.github/workflows/ut.yaml b/.github/workflows/ut.yaml
index 196119358..b01587586 100644
--- a/.github/workflows/ut.yaml
+++ b/.github/workflows/ut.yaml
@@ -1,6 +1,10 @@
name: Python application unit test
-on: [pull_request]
+on:
+ push:
+ branches:
+ - master
+ pull_request:
jobs:
test:
@@ -17,7 +21,7 @@ jobs:
- name: Install dependencies
run: |
pip install --upgrade pip
- pip install poetry
+ pip install poetry==1.6.1
poetry update
poetry install -v
@@ -37,4 +41,4 @@ jobs:
MANTLE_PUBLIC_NODE_DEBUG_RPC_URL: '${{ secrets.MANTLE_PUBLIC_NODE_DEBUG_RPC_URL }}'
run: |
export PYTHONPATH=$(pwd)
- make test indexer
+ poetry run pytest -vv -m indexer
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 8d5178316..565e3f375 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,9 +22,6 @@ wheels/
.installed.cfg
*.egg
-# JetBrains IDE
-.idea/
-
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
@@ -45,9 +42,19 @@ docker-compose/data/
docker-compose/output/
docker-compose/postgres/
+# JetBrains IDE
+.idea/
+*.iml
+
+# vim
+*.swp
+
+# OSX
+.DS_Store
+
# Local Config
-resource/hemera.ini
+hemera/resource/hemera.ini
sync_record
alembic.ini
-!indexer/modules/custom/hemera_ens/abi/*.json
-!indexer/modules/custom/cyber_id/abi/*.json
+!hemera_udf/hemera_ens/abi/*.json
+!hemera_udf/cyber_id/abi/*.json
diff --git a/Dockerfile b/Dockerfile
index d07d2d2c7..6af7424b4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,7 +15,7 @@ FROM python:3.9-slim
WORKDIR "/app"
-COPY --from=builder /app/migrations ./migrations
+COPY --from=builder /app/hemera/migrations ./migrations
COPY --from=builder /app/dist/*.whl .
RUN pip install *.whl
diff --git a/Makefile b/Makefile
index b76ece24a..f5d63d714 100644
--- a/Makefile
+++ b/Makefile
@@ -43,7 +43,7 @@ endif
init_db:
@echo "Initializing database..."
- poetry run python -m hemera.py db --init
+ poetry run hemera db --init-schema
development:
@echo "Setting up development environment..."
@@ -76,7 +76,7 @@ development:
fi; \
if ! poetry --version &> /dev/null; then \
echo "Installing Poetry..."; \
- pip install poetry; \
+ pip install poetry==1.6.1; \
else \
echo "Poetry is already installed."; \
fi; \
diff --git a/api/app/api.py b/api/app/api.py
deleted file mode 100644
index 93519ac12..000000000
--- a/api/app/api.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-
-from flask_restx import Api
-
-from api.app.address.routes import address_features_namespace
-from api.app.contract.routes import contract_namespace
-from api.app.explorer.routes import explorer_namespace
-from api.app.l2_explorer.routes import l2_explorer_namespace
-from api.app.user_operation.routes import user_operation_namespace
-from indexer.modules.custom.address_index.endpoint.routes import address_profile_namespace
-from indexer.modules.custom.deposit_to_l2.endpoint.routes import token_deposit_namespace
-from indexer.modules.custom.hemera_ens.endpoint import af_ens_namespace
-from indexer.modules.custom.init_capital.endpoints.routes import init_capital_namespace
-from indexer.modules.custom.merchant_moe.endpoints.routes import merchant_moe_namespace
-from indexer.modules.custom.opensea.endpoint.routes import opensea_namespace
-from indexer.modules.custom.staking_fbtc.endpoints.routes import staking_namespace
-from indexer.modules.custom.uniswap_v3.endpoints.routes import uniswap_v3_namespace
-
-api = Api()
-
-api.add_namespace(explorer_namespace)
-api.add_namespace(opensea_namespace)
-api.add_namespace(contract_namespace)
-api.add_namespace(uniswap_v3_namespace)
-api.add_namespace(token_deposit_namespace)
-api.add_namespace(user_operation_namespace)
-api.add_namespace(staking_namespace)
-api.add_namespace(merchant_moe_namespace)
-
-api.add_namespace(l2_explorer_namespace)
-api.add_namespace(af_ens_namespace)
-api.add_namespace(address_profile_namespace)
-
-api.add_namespace(address_features_namespace)
-api.add_namespace(init_capital_namespace)
diff --git a/cli/__init__.py b/cli/__init__.py
deleted file mode 100644
index 5d544fb98..000000000
--- a/cli/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import click
-
-from cli.aggregates import aggregates
-from cli.api import api
-from cli.db import db
-from cli.reorg import reorg
-from cli.stream import stream
-from indexer.utils.logging_utils import logging_basic_config
-
-logging_basic_config()
-
-from importlib import metadata
-
-
-def get_version():
- return metadata.version("hemera")
-
-
-@click.group()
-@click.version_option(version=get_version())
-@click.pass_context
-def cli(ctx):
- pass
-
-
-cli.add_command(stream, "stream")
-cli.add_command(api, "api")
-cli.add_command(aggregates, "aggregates")
-cli.add_command(reorg, "reorg")
-cli.add_command(db, "db")
diff --git a/cli/aggregates.py b/cli/aggregates.py
deleted file mode 100644
index c2bdb347d..000000000
--- a/cli/aggregates.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import click
-
-from common.services.postgresql_service import PostgreSQLService
-from indexer.aggr_jobs.utils import DateType, check_data_completeness, get_yesterday_date
-from indexer.controller.aggregates_controller import AggregatesController
-from indexer.controller.dispatcher.aggregates_dispatcher import AggregatesDispatcher
-
-
-@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
-@click.option(
- "-pg",
- "--postgres-url",
- type=str,
- required=True,
- envvar="POSTGRES_URL",
- help="The required postgres connection url." "e.g. postgresql+psycopg2://postgres:admin@127.0.0.1:5432/ethereum",
-)
-@click.option(
- "-p",
- "--provider-uri",
- default=None,
- show_default=True,
- type=str,
- envvar="PROVIDER_URI",
- help="The URI of the web3 provider e.g. "
- "file://$HOME/Library/Ethereum/geth.ipc or https://mainnet.infura.io"
- "It helps determine whether the latest synchronized data meets the required execution date range.",
-)
-@click.option(
- "-sd",
- "--start-date",
- default=None,
- show_default=True,
- type=DateType(),
- help="Start date in YYYY-MM-DD format",
- envvar="START_DATE",
-)
-@click.option(
- "-ed",
- "--end-date",
- default=None,
- show_default=True,
- type=DateType(),
- help="End date in YYYY-MM-DD format",
- envvar="END_DATE",
-)
-@click.option(
- "-D",
- "--date-batch-size",
- default=30,
- show_default=True,
- type=int,
- envvar="DATE_BATCH_SIZE",
- help="How many DATEs to batch in single sync round",
-)
-def aggregates(postgres_url, provider_uri, start_date, end_date, date_batch_size):
- if not start_date and not end_date:
- start_date, end_date = get_yesterday_date()
- elif not end_date:
- _, end_date = get_yesterday_date()
-
- db_service = PostgreSQLService(postgres_url)
-
- check_data_completeness(db_service, provider_uri, end_date)
-
- config = {"db_service": db_service}
- dispatcher = AggregatesDispatcher(config)
-
- controller = AggregatesController(job_dispatcher=dispatcher)
- controller.action(start_date, end_date, date_batch_size)
diff --git a/cli/api.py b/cli/api.py
deleted file mode 100644
index 59691f6ba..000000000
--- a/cli/api.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import click
-
-from cli.logo import print_logo
-
-
-@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
-def api():
- print_logo()
- from api.app.main import app
-
- app.run("0.0.0.0", 8082, threaded=True, debug=True)
diff --git a/cli/db.py b/cli/db.py
deleted file mode 100644
index b88a10a31..000000000
--- a/cli/db.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import logging
-from typing import List
-
-import click
-
-from common.models import HemeraModel, model_path_patterns
-from common.services.postgresql_service import PostgreSQLService
-from common.utils.module_loading import import_string, scan_subclass_by_path_patterns
-
-logger = logging.getLogger("DB Client")
-
-
-@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
-@click.option(
- "-pg",
- "--postgres-url",
- type=str,
- required=True,
- envvar="POSTGRES_URL",
- help="The required postgres connection url." "e.g. postgresql+psycopg2://postgres:admin@127.0.0.1:5432/ethereum",
-)
-@click.option(
- "-i",
- "--init",
- is_flag=True,
- required=False,
- help="The -i or --init flag triggers the database initialization process. ",
-)
-@click.option(
- "-v",
- "--version",
- type=str,
- default="head",
- show_default=True,
- required=False,
- help="The database version that would be initialized. "
- "By using default value 'head', database would be initialized to the latest version."
- "To make this option work, either -i or --init must be used.",
-)
-@click.option(
- "-c",
- "--create-tables",
- type=str,
- default="",
- required=False,
- help="Table names that need to be created in the database. e.g. blocks,transactions",
-)
-@click.option(
- "-d",
- "--drop-tables",
- type=str,
- default="",
- required=False,
- help="Table names that need to be dropped in the database. e.g. blocks,transactions",
-)
-def db(postgres_url, init, version, create_tables="", drop_tables=""):
- service = PostgreSQLService(jdbc_url=postgres_url, db_version=version, init_schema=init)
-
- if create_tables != "" or drop_tables != "":
- exist_models_path = [
- value["cls_import_path"]
- for key, value in scan_subclass_by_path_patterns(model_path_patterns, HemeraModel).items()
- ]
- exist_models = {
- table.__tablename__: table
- for table in [import_string(path) for path in exist_models_path]
- if hasattr(table, "__tablename__")
- }
-
- tables = create_tables.split(",")
- if len(tables) > 0:
- create(service, tables, exist_models)
-
- logger.info("Table creation has been finished.")
-
- tables = drop_tables.split(",")
- if len(drop_tables) > 0:
- drop(service, tables, exist_models)
-
- logger.info("Table deletion has been finished.")
-
- logger.info("db operation finished, now exit.")
-
-
-def create(service: PostgreSQLService, tables: List[str], exist_models: dict):
- engine = service.get_service_engine()
- for table in tables:
- if table in exist_models:
- exist_models[table].__table__.create(engine, checkfirst=True)
- logger.info(f"Table {table} created successfully.")
- else:
- logger.warning(
- f"No Table {table} model definition was found in the following directories: {model_path_patterns} "
- )
-
-
-def drop(service: PostgreSQLService, tables: List[str], exist_models: dict):
- engine = service.get_service_engine()
- for table in tables:
- if table in exist_models:
- exist_models[table].__table__.drop(engine, checkfirst=True)
- logger.info(f"Table {table} dropped successfully.")
- else:
- logger.warning(
- f"No Table {table} model definition was found in the following directories: {model_path_patterns} "
- )
diff --git a/cli/reorg.py b/cli/reorg.py
deleted file mode 100644
index e91c5ffd3..000000000
--- a/cli/reorg.py
+++ /dev/null
@@ -1,216 +0,0 @@
-import logging
-import os
-
-import click
-
-from common.services.postgresql_service import PostgreSQLService
-from enumeration.entity_type import ALL_ENTITY_COLLECTIONS, calculate_entity_value, generate_output_types
-from indexer.controller.reorg_controller import ReorgController
-from indexer.controller.scheduler.reorg_scheduler import ReorgScheduler
-from indexer.exporters.postgres_item_exporter import PostgresItemExporter
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.logging_utils import configure_logging, configure_signals
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.rpc_utils import pick_random_provider_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
-
-exception_recorder = ExceptionRecorder()
-
-
-@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
-@click.option(
- "-p",
- "--provider-uri",
- default="https://ethereum-rpc.publicnode.com",
- show_default=True,
- type=str,
- envvar="PROVIDER_URI",
- help="The URI of the web3 provider e.g. "
- "file://$HOME/Library/Ethereum/geth.ipc or https://ethereum-rpc.publicnode.com",
-)
-@click.option(
- "-d",
- "--debug-provider-uri",
- default="https://ethereum-rpc.publicnode.com",
- show_default=True,
- type=str,
- envvar="DEBUG_PROVIDER_URI",
- help="The URI of the web3 debug provider e.g. "
- "file://$HOME/Library/Ethereum/geth.ipc or https://ethereum-rpc.publicnode.com",
-)
-@click.option(
- "-pg",
- "--postgres-url",
- type=str,
- required=True,
- envvar="POSTGRES_URL",
- help="The required postgres connection url." "e.g. postgresql+psycopg2://postgres:admin@127.0.0.1:5432/ethereum",
-)
-@click.option(
- "-v",
- "--db-version",
- default="head",
- show_default=True,
- type=str,
- envvar="DB_VERSION",
- help="The database version to initialize the database. using the alembic script's revision ID to "
- "specify a version."
- " e.g. head, indicates the latest version."
- "or base, indicates the empty database without any table.",
-)
-@click.option(
- "-b",
- "--batch-size",
- default=10,
- show_default=True,
- type=int,
- envvar="BATCH_SIZE",
- help="How many parameters to batch in single request",
-)
-@click.option(
- "--debug-batch-size",
- default=1,
- show_default=True,
- type=int,
- envvar="DEBUG_BATCH_SIZE",
- help="How many parameters to batch in single debug rpc request",
-)
-@click.option(
- "--block-number",
- show_default=True,
- type=int,
- envvar="BLOCK_NUMBER",
- help="Specify the block number to reorging.",
-)
-@click.option(
- "-r",
- "--ranges",
- default=1000,
- show_default=True,
- type=int,
- envvar="RANGES",
- help="Specify the range limit for data fixing.",
-)
-@click.option(
- "--log-file",
- default=None,
- show_default=True,
- type=str,
- envvar="LOG_FILE",
- help="Log file",
-)
-@click.option(
- "-m",
- "--multicall",
- default=False,
- show_default=True,
- type=bool,
- help="if `multicall` is set to True, it will decrease the consume of rpc calls",
- envvar="MULTI_CALL_ENABLE",
-)
-@click.option("--cache", default=None, show_default=True, type=str, envvar="CACHE", help="Cache")
-@click.option(
- "--auto-upgrade-db",
- default=True,
- show_default=True,
- type=bool,
- envvar="AUTO_UPGRADE_DB",
- help="Whether to automatically run database migration scripts to update the database to the latest version.",
-)
-@click.option(
- "--log-level",
- default="INFO",
- show_default=True,
- type=str,
- envvar="LOG_LEVEL",
- help="Set the logging output level.",
-)
-def reorg(
- provider_uri,
- debug_provider_uri,
- postgres_url,
- block_number,
- ranges,
- batch_size,
- debug_batch_size,
- db_version="head",
- multicall=True,
- log_file=None,
- cache=None,
- config_file=None,
- auto_upgrade_db=True,
- log_level="INFO",
-):
- configure_logging(log_level=log_level, log_file=log_file)
- configure_signals()
-
- provider_uri = pick_random_provider_uri(provider_uri)
- debug_provider_uri = pick_random_provider_uri(debug_provider_uri)
- logging.info("Using provider " + provider_uri)
- logging.info("Using debug provider " + debug_provider_uri)
-
- # build postgresql service
- if postgres_url:
- service = PostgreSQLService(postgres_url, db_version=db_version, init_schema=auto_upgrade_db)
- config = {"db_service": service}
- exception_recorder.init_pg_service(service)
- else:
- logging.error("No postgres url provided. Exception recorder will not be useful.")
- exit(1)
-
- if config_file:
- if not os.path.exists(config_file):
- raise click.ClickException(f"Config file {config_file} not found")
- with open(config_file, "r") as f:
- if config_file.endswith(".json"):
- import json
-
- config.update(json.load(f))
- elif config_file.endswith(".yaml") or config_file.endswith(".yml"):
- import yaml
-
- config.update(yaml.safe_load(f))
- else:
- raise click.ClickException(f"Config file {config_file} is not supported)")
-
- entity_types = calculate_entity_value(",".join(ALL_ENTITY_COLLECTIONS))
- output_types = list(generate_output_types(entity_types))
-
- job_scheduler = ReorgScheduler(
- batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
- batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(debug_provider_uri, batch=True)),
- item_exporters=PostgresItemExporter(
- postgres_url=postgres_url, db_version=db_version, init_schema=auto_upgrade_db
- ),
- batch_size=batch_size,
- debug_batch_size=debug_batch_size,
- required_output_types=output_types,
- config=config,
- cache=cache,
- multicall=multicall,
- )
-
- controller = ReorgController(
- batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=False)),
- job_scheduler=job_scheduler,
- ranges=ranges,
- config=config,
- )
-
- job = None
- while True:
- if job:
- controller.action(
- job_id=job.job_id,
- block_number=job.last_fixed_block_number - 1,
- remains=job.remain_process,
- )
- else:
- controller.action(block_number=block_number)
-
- job = controller.wake_up_next_job()
- if job:
- logging.info(f"Waking up uncompleted job: {job.job_id}.")
- else:
- logging.info("No more uncompleted jobs to wake-up, reorg process will terminate.")
- break
diff --git a/cli/stream.py b/cli/stream.py
deleted file mode 100644
index f0ed167bb..000000000
--- a/cli/stream.py
+++ /dev/null
@@ -1,477 +0,0 @@
-import logging
-import os
-import time
-
-import click
-from web3 import Web3
-
-from cli.logo import print_logo
-from common.services.postgresql_service import PostgreSQLService
-from enumeration.entity_type import DEFAULT_COLLECTION, calculate_entity_value, generate_output_types
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.controller.stream_controller import StreamController
-from indexer.exporters.item_exporter import create_item_exporters
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.limit_reader import create_limit_reader
-from indexer.utils.logging_utils import configure_logging, configure_signals
-from indexer.utils.parameter_utils import (
- check_file_exporter_parameter,
- check_source_load_parameter,
- generate_dataclass_type_list_from_parameter,
-)
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.rpc_utils import pick_random_provider_uri
-from indexer.utils.sync_recorder import create_recorder
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
-
-exception_recorder = ExceptionRecorder()
-
-
-def calculate_execution_time(func):
- def wrapper(*args, **kwargs):
- start_time = time.time()
- result = func(*args, **kwargs)
- end_time = time.time()
- execution_time = end_time - start_time
- print(f"function {func.__name__} time: {execution_time:.6f} s")
- return result
-
- return wrapper
-
-
-@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
-@click.option(
- "-p",
- "--provider-uri",
- default="https://ethereum-rpc.publicnode.com",
- show_default=True,
- type=str,
- envvar="PROVIDER_URI",
- help="The URI of the web3 provider e.g. "
- "file://$HOME/Library/Ethereum/geth.ipc or https://ethereum-rpc.publicnode.com",
-)
-@click.option(
- "-pg",
- "--postgres-url",
- type=str,
- required=False,
- envvar="POSTGRES_URL",
- help="The required postgres connection url." "e.g. postgresql+psycopg2://postgres:admin@127.0.0.1:5432/ethereum",
-)
-@click.option(
- "-d",
- "--debug-provider-uri",
- default="https://ethereum-rpc.publicnode.com",
- show_default=True,
- type=str,
- envvar="DEBUG_PROVIDER_URI",
- help="The URI of the web3 debug provider e.g. "
- "file://$HOME/Library/Ethereum/geth.ipc or https://ethereum-rpc.publicnode.com",
-)
-@click.option(
- "-o",
- "--output",
- type=str,
- envvar="OUTPUT",
- help="The output selection."
- "Print to console e.g. console; "
- "or postgresql e.g. postgres"
- "or local json file e.g. jsonfile://your-file-path; "
- "or local csv file e.g. csvfile://your-file-path; "
- "or both. e.g. console,jsonfile://your-file-path,csvfile://your-file-path",
-)
-@click.option(
- "-E",
- "--entity-types",
- default=",".join(DEFAULT_COLLECTION),
- show_default=True,
- type=str,
- envvar="ENTITY_TYPES",
- help="The list of entity types to export. " "e.g. EXPLORER_BASE | EXPLORER_TOKEN | EXPLORER_TRACE",
-)
-@click.option(
- "-O",
- "--output-types",
- default=None,
- show_default=True,
- type=str,
- envvar="OUTPUT_TYPES",
- help="The list of output types to export, corresponding to more detailed data models. "
- "Specifying this option will prioritize these settings over the entity types specified in -E. "
- "Examples include: block, transaction, log, "
- "token, address_token_balance, erc20_token_transfer, erc721_token_transfer, erc1155_token_transfer, "
- "trace, contract, coin_balance.",
-)
-@click.option(
- "-v",
- "--db-version",
- default="head",
- show_default=True,
- type=str,
- envvar="DB_VERSION",
- help="The database version to initialize the database. using the alembic script's revision ID to "
- "specify a version. "
- "e.g. head, indicates the latest version."
- "or base, indicates the empty database without any table.",
-)
-@click.option(
- "-s",
- "--start-block",
- default=None,
- show_default=True,
- type=int,
- help="Start block",
- envvar="START_BLOCK",
-)
-@click.option(
- "-e",
- "--end-block",
- default=None,
- show_default=True,
- type=int,
- help="End block",
- envvar="END_BLOCK",
-)
-@click.option(
- "--retry-from-record",
- default=True,
- show_default=True,
- type=bool,
- envvar="RETRY_FROM_RECORD",
- help="With the default parameter, the program will always run from the -s parameter, "
- "and when set to True, it will run from the record point between -s and -e",
-)
-@click.option(
- "--blocks-per-file",
- default=1000,
- show_default=True,
- type=int,
- envvar="BLOCKS_PER_FILE",
- help="How many blocks data was written to each file",
-)
-@click.option(
- "--period-seconds",
- default=2,
- show_default=True,
- type=float,
- envvar="PERIOD_SECONDS",
- help="How many seconds to sleep between syncs",
-)
-@click.option(
- "-b",
- "--batch-size",
- default=10,
- show_default=True,
- type=int,
- envvar="BATCH_SIZE",
- help="The number of non-debug RPC requests to batch in a single request",
-)
-@click.option(
- "--debug-batch-size",
- default=1,
- show_default=True,
- type=int,
- envvar="DEBUG_BATCH_SIZE",
- help="The number of debug RPC requests to batch in a single request",
-)
-@click.option(
- "-B",
- "--block-batch-size",
- default=1,
- show_default=True,
- type=int,
- envvar="BLOCK_BATCH_SIZE",
- help="How many blocks to batch in single sync round",
-)
-@click.option(
- "-w",
- "--max-workers",
- default=5,
- show_default=True,
- type=int,
- help="The number of workers during a request to rpc.",
- envvar="MAX_WORKERS",
-)
-@click.option(
- "-pn",
- "--process-numbers",
- default=1,
- show_default=True,
- type=int,
- help="The processor numbers to ues.",
- envvar="PROCESS_NUMBERS",
-)
-@click.option(
- "-ps",
- "--process-size",
- default=None,
- show_default=True,
- type=int,
- help="The data size for every process to handle. Default to {B}/{pn} ,see above",
- envvar="PROCESS_SIZE",
-)
-@click.option(
- "-pto",
- "--process-time-out",
- default=None,
- show_default=True,
- type=int,
- help="Timeout for every processor, default to {ps} * 300 , see above",
- envvar="PROCESS_TIME_OUT",
-)
-@click.option(
- "--delay",
- default=0,
- show_default=True,
- type=int,
- envvar="DELAY",
- help="The limit number of blocks which delays from the network current block number.",
-)
-@click.option(
- "--source-path",
- default=None,
- show_default=True,
- required=False,
- type=str,
- envvar="SOURCE_PATH",
- help="The path to load the data."
- "Load from local csv file e.g. csvfile://your-file-direction; "
- "or local json file e.g. jsonfile://your-file-direction; ",
-)
-@click.option(
- "--source-types",
- default="block,transaction,log",
- show_default=True,
- type=str,
- envvar="SOURCE_TYPES",
- help="The list of types to read from source, corresponding to more detailed data models. "
- "Examples include: block, transaction, log, "
- "token, address_token_balance, erc20_token_transfer, erc721_token_transfer, erc1155_token_transfer, "
- "trace, contract, coin_balance.",
-)
-@click.option(
- "--log-file",
- default=None,
- show_default=True,
- type=str,
- envvar="LOG_FILE",
- help="Log file",
-)
-@click.option(
- "--pid-file",
- default=None,
- show_default=True,
- type=str,
- envvar="PID_FILE",
- help="Pid file",
-)
-@click.option(
- "--sync-recorder",
- default="file:sync_record",
- show_default=True,
- type=str,
- envvar="SYNC_RECORDER",
- help="How to store the sync record data."
- 'e.g pg:base. means sync record data will store in pg as "base" be key'
- 'or file:base. means sync record data will store in file as "base" be file name',
-)
-@click.option(
- "--cache",
- default="memory",
- show_default=True,
- type=str,
- envvar="CACHE_SERVICE",
- help="How to store the cache data."
- "e.g redis. means cache data will store in redis, redis://localhost:6379"
- "or memory. means cache data will store in memory, memory",
-)
-@click.option(
- "-m",
- "--multicall",
- default=False,
- show_default=True,
- type=bool,
- help="if `multicall` is set to True, it will decrease the consume of rpc calls",
- envvar="MULTI_CALL_ENABLE",
-)
-@click.option(
- "--auto-reorg",
- default=False,
- show_default=True,
- type=bool,
- envvar="AUTO_REORG",
- help="Whether to detect reorg in data streams and automatically repair data.",
-)
-@click.option(
- "--config-file",
- default=None,
- show_default=True,
- type=str,
- envvar="CONFIG_FILE",
- help="The path to the configuration file, if provided, the configuration file will be used to load the configuration. Supported formats are json and yaml.",
-)
-@click.option(
- "--force-filter-mode",
- default=False,
- show_default=True,
- type=bool,
- envvar="FORCE_FILTER_MODE",
- help="Force the filter mode to be enabled, even if no filters job are provided.",
-)
-@click.option(
- "--auto-upgrade-db",
- default=True,
- show_default=True,
- type=bool,
- envvar="AUTO_UPGRADE_DB",
- help="Whether to automatically run database migration scripts to update the database to the latest version.",
-)
-@click.option(
- "--log-level",
- default="INFO",
- show_default=True,
- type=str,
- envvar="LOG_LEVEL",
- help="Set the logging output level.",
-)
-@calculate_execution_time
-def stream(
- provider_uri,
- debug_provider_uri,
- postgres_url,
- output,
- db_version,
- start_block,
- end_block,
- entity_types,
- output_types,
- source_types,
- blocks_per_file,
- delay=0,
- period_seconds=10,
- batch_size=10,
- debug_batch_size=1,
- block_batch_size=1,
- max_workers=5,
- process_numbers=1,
- process_size=None,
- process_time_out=None,
- log_file=None,
- pid_file=None,
- source_path=None,
- sync_recorder="file:sync_record",
- retry_from_record=False,
- cache="memory",
- auto_reorg=False,
- multicall=True,
- config_file=None,
- force_filter_mode=False,
- auto_upgrade_db=True,
- log_level="INFO",
-):
- print_logo()
- configure_logging(log_level, log_file)
- configure_signals()
- provider_uri = pick_random_provider_uri(provider_uri)
- debug_provider_uri = pick_random_provider_uri(debug_provider_uri)
- logging.getLogger("ROOT").info("Using provider " + provider_uri)
- logging.getLogger("ROOT").info("Using debug provider " + debug_provider_uri)
-
- # parameter logic checking
- if source_path:
- check_source_load_parameter(source_path, start_block, end_block, auto_reorg)
- check_file_exporter_parameter(output, block_batch_size, blocks_per_file)
-
- # build config
- config = {
- "blocks_per_file": blocks_per_file,
- "source_path": source_path,
- "chain_id": Web3(Web3.HTTPProvider(provider_uri)).eth.chain_id,
- }
-
- if postgres_url:
- service = PostgreSQLService(postgres_url, db_version=db_version, init_schema=auto_upgrade_db)
- config["db_service"] = service
- exception_recorder.init_pg_service(service)
- else:
- logging.getLogger("ROOT").warning("No postgres url provided. Exception recorder will not be useful.")
-
- if config_file:
- file_based_config = {}
- if not os.path.exists(config_file):
- raise click.ClickException(f"Config file {config_file} not found")
- with open(config_file, "r") as f:
- if config_file.endswith(".json"):
- import json
-
- file_based_config = json.load(f)
- elif config_file.endswith(".yaml") or config_file.endswith(".yml"):
- import yaml
-
- file_based_config = yaml.safe_load(f)
- else:
- raise click.ClickException(f"Config file {config_file} is not supported)")
-
- if file_based_config.get("chain_id") != config["chain_id"]:
- raise click.ClickException(
- f"Config file {config_file} is not compatible with chain_id {config['chain_id']}"
- )
- else:
- logging.getLogger("ROOT").info(f"Loading config from file: {config_file}, chain_id: {config['chain_id']}")
- config.update(file_based_config)
- output_types_by_entity_type = []
- if entity_types is not None:
- entity_types = calculate_entity_value(entity_types)
- output_types_by_entity_type = list(set(generate_output_types(entity_types)))
-
- output_types = list(
- set(generate_dataclass_type_list_from_parameter(output_types, "output") + output_types_by_entity_type)
- )
-
- if source_path and source_path.startswith("postgresql://"):
- source_types = generate_dataclass_type_list_from_parameter(source_types, "source")
-
- job_scheduler = JobScheduler(
- batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
- batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(debug_provider_uri, batch=True)),
- item_exporters=create_item_exporters(output, config),
- batch_size=batch_size,
- debug_batch_size=debug_batch_size,
- max_workers=max_workers,
- config=config,
- required_output_types=output_types,
- required_source_types=source_types,
- cache=cache,
- auto_reorg=auto_reorg,
- multicall=multicall,
- force_filter_mode=force_filter_mode,
- )
-
- if process_numbers is None:
- process_numbers = 1
- if process_size is None:
- process_size = int(block_batch_size / process_numbers)
- if process_time_out is None:
- process_time_out = 300 * process_size
-
- controller = StreamController(
- batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=False)),
- job_scheduler=job_scheduler,
- sync_recorder=create_recorder(sync_recorder, config),
- limit_reader=create_limit_reader(
- source_path, ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=False))
- ),
- retry_from_record=retry_from_record,
- delay=delay,
- process_numbers=process_numbers,
- process_size=process_size,
- process_time_out=process_time_out,
- )
-
- controller.action(
- start_block=start_block,
- end_block=end_block,
- block_batch_size=block_batch_size,
- period_seconds=period_seconds,
- pid_file=pid_file,
- )
diff --git a/common/utils/db_utils.py b/common/utils/db_utils.py
deleted file mode 100644
index ac1dd2eb6..000000000
--- a/common/utils/db_utils.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from sqlalchemy import text
-
-from common.models import db
-from common.utils.config import get_config
-
-app_config = get_config()
-
-
-def build_entities(model, columns):
- if columns == "*":
- entities = [attr for attr in model.__table__.columns]
- else:
- entities = []
- for column in columns:
- if isinstance(column, tuple):
- col, alias = column
- entities.append(getattr(model, col).label(alias))
- else:
- entities.append(getattr(model, column))
-
- return entities
-
-
-def get_total_row_count(table):
- estimate_transaction = db.session.execute(
- text(
- f"""
- SELECT reltuples::bigint AS estimate FROM pg_class where oid = '{app_config.db_read_sql_alchemy_database_config.schema}.{table}'::regclass;
- """
- )
- ).fetchone()
- return estimate_transaction[0]
diff --git a/config/indexer-config-arbitrum.yaml b/config/indexer-config-arbitrum.yaml
new file mode 100644
index 000000000..5584b410b
--- /dev/null
+++ b/config/indexer-config-arbitrum.yaml
@@ -0,0 +1,47 @@
+chain_id: 42161
+
+uniswap_v3_job:
+ # empty means no filter. only work in price job
+ pool_address:
+
+ # works in price\token job
+ jobs:
+ - type: agni # pancake
+ factory_address: '0x0bfbcf9fa4f9c56b0f40a671ad40e0805a091865'
+ position_token_address: '0x46a15b0b27311cedf172ab29e4f4766fbe7f4364'
+
+
+export_staked_transferred_balance_job:
+ TRANSFERRED_CONTRACTS_DICT:
+ pump_btc:
+ - "0x4413ca15da17db82826caee058c083f573c1f16c"
+ bed_rock:
+ - "0x84e5c854a7ff9f49c888d69deca578d406c26800"
+
+ token_address:
+ # fbtc
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+ # pump btc
+ - "0xf469fbd2abcd6b9de8e169d128226c0fc90a012e"
+ # uniBTC
+ - "0x6b2a01a5f79deb4c2f3c0eda7b01df456fbd726a"
+
+
+export_tokens_and_transfers_job:
+ filter_token_address:
+ # pancake position token address
+ - "0x46a15b0b27311cedf172ab29e4f4766fbe7f4364"
+ # fbtc
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+
+ # pump btc
+ - "0xf469fbd2abcd6b9de8e169d128226c0fc90a012e"
+ # uniBTC
+ - "0x6b2a01a5f79deb4c2f3c0eda7b01df456fbd726a"
+
+ # solvBTC
+ # solvBTC.BBN
+
+
+
+
diff --git a/config/indexer-config-base.yaml b/config/indexer-config-base.yaml
index ca681d185..a9a25fb22 100644
--- a/config/indexer-config-base.yaml
+++ b/config/indexer-config-base.yaml
@@ -1,5 +1,8 @@
chain_id: 8453
+export_transactions_and_logs_job:
+ use_receipt_from_blocks_rpc: True
+
uniswap_v3_job:
# empty means no filter. only work in price job
pool_address:
@@ -7,9 +10,64 @@ uniswap_v3_job:
# works in price\token job
jobs:
- type: uniswapv3 # uniswapv3`
- factory address: '0x33128a8fc17869897dce68ed026d694621f6fdfd'
+ factory_address: '0x33128a8fc17869897dce68ed026d694621f6fdfd'
position_token_address: '0x03a520b32c04bf3beef7beb72e919cf822ed34f1'
- type: uniswapv3 # pancake
- factory address: '0x0bfbcf9fa4f9c56b0f40a671ad40e0805a091865'
- position_token_address: '0x46a15b0b27311cedf172ab29e4f4766fbe7f4364'
\ No newline at end of file
+ factory_address: '0x0bfbcf9fa4f9c56b0f40a671ad40e0805a091865'
+ position_token_address: '0x46a15b0b27311cedf172ab29e4f4766fbe7f4364'
+
+ - type: aerodrome # aerodrome
+ factory_address: "0x5e7bb104d84c7cb9b682aac2f3d509f5f406809a"
+ position_token_address: '0x827922686190790b37229fd06084350e74485b72'
+
+# Add Uniswap v4 configuration
+uniswap_v4_job:
+ # empty means no filter. only work in price job
+ pool_address:
+
+
+ # WETH address for ETH swaps
+ weth_address: '0x4200000000000000000000000000000000000006'
+
+ # WETH hook addresses for tracking ETH swaps
+ weth_hook_addresses: []
+
+ # works in price\token job
+ jobs:
+ - type: uniswapv4
+ factory_address: '0x498581ff718922c3f8e6a244956af099b2652b2b' # PoolManager
+ position_token_address: '0x7c5f5a4bbd8fd63184577525326123b519429bdc' # PositionManager
+ state_view_address: '0xa3c0c9b65bad0b08107aa264b0f3db444b867a71' # StateView
+
+export_tokens_and_transfers_job:
+ filter_token_address:
+ # fbtc
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+ # uniswapv3
+ - "0x03a520b32c04bf3beef7beb72e919cf822ed34f1"
+ - "0x46a15b0b27311cedf172ab29e4f4766fbe7f4364"
+ - "0x827922686190790b37229fd06084350e74485b72"
+ - '0x833589fcd6edb6e08f4c7c32d4f71b54bda02913'
+ - '0x50c5725949a6f0c72e6c4a641f24049a917db0cb'
+ - '0xfde4c96c8593536e31f229ea8f37b2ada2699bb2'
+ - '0x4200000000000000000000000000000000000006'
+
+
+export_meme_token_created_job:
+ clanker_factory_address_v0: "0x250c9FB2b411B48273f69879007803790A6AeA47"
+ clanker_factory_address_v1: "0x9b84fce5dcd9a38d2d01d5d72373f6b6b067c3e1"
+ virtuals_factory_address_v0: "0x41a0f5b16b10748d594b471850bd7488f929beba"
+ virtuals_factory_address_v1: "0x94Bf9622348Cf5598D9A491Fa809194Cf85A0D61"
+ larry_factory_address:
+ - "0x5faAb5D52790916ed9c2C159960006151e311bA0"
+ - "0xb3a720f17902b7d2e8c38c5044c3b20e8ac9c27c"
+
+
+export_block_token_price_job:
+ '0x833589fcd6edb6e08f4c7c32d4f71b54bda02913': 'USDC'
+ '0x50c5725949a6f0c72e6c4a641f24049a917db0cb': 'DAI'
+ '0xfde4c96c8593536e31f229ea8f37b2ada2699bb2': 'USDT'
+ '0x4200000000000000000000000000000000000006': 'WETH'
+ '0x0000000000000000000000000000000000000000': 'ETH'
+
diff --git a/config/indexer-config-bob.yaml b/config/indexer-config-bob.yaml
new file mode 100644
index 000000000..3b29af4b2
--- /dev/null
+++ b/config/indexer-config-bob.yaml
@@ -0,0 +1,44 @@
+chain_id: 60808
+
+uniswap_v3_job:
+ # empty means no filter. only work in price job
+ pool_address:
+
+ # works in price\token job
+ jobs:
+ - type: uniswapv3 # oku
+ factory_address: '0xcb2436774c3e191c85056d248ef4260ce5f27a9d'
+ position_token_address: '0x743e03cceb4af2efa3cc76838f6e8b50b63f184c'
+
+
+export_staked_transferred_balance_job:
+ TRANSFERRED_CONTRACTS_DICT:
+ pump_btc:
+ - "0xd7c019326b5c22a6a2e0aadd1241af94ff7ecf7b"
+ bed_rock:
+ - "0x2ac98db41cbd3172cb7b8fd8a8ab3b91cfe45dcf"
+
+ token_address:
+ # fBTC
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+ # pumpBTC
+ - "0x1fcca65fb6ae3b2758b9b2b394cb227eae404e1e"
+ # uniBTC
+ - "0x236f8c0a61da474db21b693fb2ea7aab0c803894"
+
+
+export_tokens_and_transfers_job:
+ filter_token_address:
+ # OKU position token address
+ - "0x743e03cceb4af2efa3cc76838f6e8b50b63f184c"
+ # fBTC
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+ # pumpBTC
+ - "0x1fcca65fb6ae3b2758b9b2b394cb227eae404e1e"
+ # uniBTC
+ - "0x236f8c0a61da474db21b693fb2ea7aab0c803894"
+
+
+
+
+
diff --git a/config/indexer-config-bsc.yaml b/config/indexer-config-bsc.yaml
index f9d630db9..044a9b572 100644
--- a/config/indexer-config-bsc.yaml
+++ b/config/indexer-config-bsc.yaml
@@ -1,16 +1,63 @@
chain_id: 56
+total_supply_job:
+ token_address:
+ # dodo
+ - "0x7130d2a12b9bcbfae4f2634d864a1ee1ce3ead9c"
+
+
uniswap_v3_job:
# empty means no filter. only work in price job
pool_address:
- '0xe2bb11d6b6a39e55762f5e14d632f0981198b3a7'
- # works in price\token job
- jobs:
- - type: swapsicle # thena`
- factory address: '0x306f06c147f064a010530292a1eb6737c3e378e4'
- position_token_address: '0xa51adb08cbe6ae398046a23bec013979816b77ab'
+# # works in price\token job
+# jobs:
+# - type: swapsicle # thena`
+# factory_address: '0x306f06c147f064a010530292a1eb6737c3e378e4'
+# position_token_address: '0xa51adb08cbe6ae398046a23bec013979816b77ab'
+
+thena_job:
+ gamma_pool_address: '0xfcecab790751c2bf65bcf7bce6ff3555dc12e40b'
+ thena_farming_pool_address: '0xc21b8ed94ad3b1d383ef20605498614e27b222fd'
+ thena_liquidity_pool_address: '0xe2bb11d6b6a39e55762f5e14d632f0981198b3a7'
+
+
+export_staked_transferred_balance_job:
+ TRANSFERRED_CONTRACTS_DICT:
+ pump_btc:
+ - "0x2b4b9047c9fea54705218388bfc7aa7bada4bb5e"
+ bed_rock:
+ - "0x84e5c854a7ff9f49c888d69deca578d406c26800"
+ avalon:
+ - "0x02fedcff97942fe28e8936cdc3d7a480fdd248f0"
+ - "0xc757e47d6bc20feab54e16f2939f51aa4826def7"
+ astherus:
+ - "0x128463a60784c4d3f46c23af3f65ed859ba87974"
+
+
+ token_address:
+ # fbtc
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+ # pump btc
+ - "0xf9c4ff105803a77ecb5dae300871ad76c2794fa4"
+ # uniBTC
+ - "0x6b2a01a5f79deb4c2f3c0eda7b01df456fbd726a"
+
export_tokens_and_transfers_job:
filter_token_address:
- - '0xa51adb08cbe6ae398046a23bec013979816b77ab'
\ No newline at end of file
+ # gamma_pool_address
+ - '0xfcecab790751c2bf65bcf7bce6ff3555dc12e40b'
+ # fbtc
+ - "0xc96de26018a54d51c097160568752c4e3bd6c364"
+ # pump btc
+ - "0xf9c4ff105803a77ecb5dae300871ad76c2794fa4"
+ # uniBTC
+ - "0x6b2a01a5f79deb4c2f3c0eda7b01df456fbd726a"
+
+export_four_meme_job:
+ token_manager2_addresses: "0x5c952063c7fc8610FFDB798152D69F0B9550762b"
+
+export_block_token_price_job:
+ '0xbb4CdB9CBd36B01bD1cBaEBF2De08d9173bc095c': 'WBNB'
diff --git a/config/indexer-config-eth.yaml b/config/indexer-config-eth.yaml
index c4ea60680..c167e34ab 100644
--- a/config/indexer-config-eth.yaml
+++ b/config/indexer-config-eth.yaml
@@ -1,7 +1,12 @@
chain_id: 1
-uniswap_v3_pool_job:
- position_token_address: '0xc36442b4a4522e871399cd717abdd847ab11fe88'
- factory_address: '0x1f98431c8ad98523631ae4a59f267346ea31f984'
+uniswap_v3_job:
+ # empty means no filter. only work in price job
+ pool_address:
+ # works in price\token job
+ jobs:
+ - type: uniswapv3 # uniswapv3
+ factory_address: '0x1f98431c8ad98523631ae4a59f267346ea31f984'
+ position_token_address: '0xc36442b4a4522e871399cd717abdd847ab11fe88'
eigen_layer_job:
STRATEGY_MANAGER:
@@ -29,3 +34,9 @@ pendle_pools_job:
pendle_market_factory_address: '0x27b1dacd74688af24a64bd3c9c1b143118740784'
pendle_market_factory_v3_address: '0x6fcf753f2c67b83f7b09746bbc4fa0047b35d050'
+
+export_block_token_price_job:
+ '0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48': 'USDC'
+ '0x6b175474e89094c44da98b954eedeac495271d0f': 'DAI'
+ '0xdac17f958d2ee523a2206206994597c13d831ec7': 'USDT'
+ '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2': 'ETH'
\ No newline at end of file
diff --git a/config/indexer-config-mantle.yaml b/config/indexer-config-mantle.yaml
index 78fd820bf..b5dd69050 100644
--- a/config/indexer-config-mantle.yaml
+++ b/config/indexer-config-mantle.yaml
@@ -4,24 +4,30 @@ total_supply_job:
token_address:
# dodo fbtc wbtc
- "0xd39dfbfba9e7eccd813918ffbda10b783ea3b3c6"
+ # teahouse
+ - "0xfc3861c04c5ce0883d9f79308e5a65402141df85"
+ - "0x4ddd37f662871fb49ebbc88a58897961e2c12a60"
uniswap_v3_job:
# empty means no filter. only work in price job
pool_address:
+# teahouse
+# - "0x8b4e2134d1f3da64f9cc044152bf8ee121bb24ce"
+# - "0xcfef46bc49c7502edd960850264b9c2dfe4d75d7"
# works in price\token job
jobs:
- type: uniswapv3 # cle
- factory address: '0xaaa32926fce6be95ea2c51cb4fcb60836d320c42'
+ factory_address: '0xaaa32926fce6be95ea2c51cb4fcb60836d320c42'
position_token_address: '0xaaa78e8c4241990b4ce159e105da08129345946a'
- type: swapsicle # swapsicle
- factory address: '0xc848bc597903b4200b9427a3d7f61e3ff0553913'
+ factory_address: '0xc848bc597903b4200b9427a3d7f61e3ff0553913'
position_token_address: '0x7d24de60a68ae47be4e852cf03dd4d8588b489ec'
- type: agni # agni
- factory address: '0x25780dc8fc3cfbd75f33bfdab65e969b603b2035'
+ factory_address: '0x25780dc8fc3cfbd75f33bfdab65e969b603b2035'
position_token_address: '0x218bf598d1453383e2f4aa7b14ffb9bfb102d637'
- type: agni # fusion
- factory address: '0x530d2766d1988cc1c000c8b7d00334c14b69ad71'
+ factory_address: '0x530d2766d1988cc1c000c8b7d00334c14b69ad71'
position_token_address: '0x5752f085206ab87d8a5ef6166779658add455774'
@@ -38,6 +44,7 @@ export_staked_transferred_balance_job:
- "0x8f778806cbea29f0f64ba6a4b7724bcd5eed543e"
treehouse:
- "0x5e4acca7a9989007cd74ae4ed1b096c000779dcc"
+ # maybe can be removed
dolomite:
- "0xe6ef4f0b2455bab92ce7cc78e35324ab58917de8"
bed_rock:
@@ -51,10 +58,10 @@ export_staked_transferred_balance_job:
- "0xbadc72654ace6ed707fd6833037586f3dc867e0b"
woofi:
- "0x82fde5086784e348aed03eb7b19ded97652db7a8"
- # maybe can be removed
- init_capital:
- - "0x233493e9dc68e548ac27e4933a600a3a4682c0c3"
- - "0x7fa704e73262e5a9f48382087f69c6aba0408eaa"
+ circuit:
+ - "0x5b27576159d201697feb73e7cbe5dafcfdc9b0dc"
+
+
token_address:
# fbtc
@@ -71,6 +78,15 @@ export_staked_transferred_balance_job:
- "0x1d40bafc49c37cda49f2a5427e2fb95e1e3fcf20"
# wecmeth, for woofi
- "0x872b6ff825da431c941d12630754036278ad7049"
+ # circuit
+ - "0x59e641de941cc794cdf6152eda0ef51210373d95"
+
+
+teahouse_job:
+ # created from 70855795, 10-25
+ '0xfc3861c04c5ce0883d9f79308e5a65402141df85': "0x8b4e2134d1f3da64f9cc044152bf8ee121bb24ce"
+ # created from 70852738, 10-25
+ "0x4ddd37f662871fb49ebbc88a58897961e2c12a60": "0xcfef46bc49c7502edd960850264b9c2dfe4d75d7"
export_tokens_and_transfers_job:
@@ -94,4 +110,55 @@ export_tokens_and_transfers_job:
# solvBTC.BBN
- "0x1d40bafc49c37cda49f2a5427e2fb95e1e3fcf20"
# wecmeth, for woofi
- - "0x872b6ff825da431c941d12630754036278ad7049"
\ No newline at end of file
+ - "0x872b6ff825da431c941d12630754036278ad7049"
+ # teahouse
+ - "0xfc3861c04c5ce0883d9f79308e5a65402141df85"
+ - "0x4ddd37f662871fb49ebbc88a58897961e2c12a60"
+
+ # thetanuts
+ - "0xdee7cb1d08ec5e35c4792856f86dd0584db29cfe"
+ # hour_glass
+ - "0x326b1129a3ec2ad5c4016d2bb4b912687890ae6c"
+ # Mitosis
+ - "0x6ff000453a9c14f7d3bf381925c8cde565dbce55"
+ #circuit
+ - "0x59e641de941cc794cdf6152eda0ef51210373d95"
+ # lendle/aurelius
+ - "0x067ddc903148968d49abc3144fd7619820f16949"
+ - "0x334a542b51212b8bcd6f96efd718d55a9b7d1c35"
+ - "0xf91798762cc61971df6df0e15f0904e174387477"
+ - "0xfdd2ebc184b4ff6df14562715452e970c82fe49a"
+ - "0xaa9c890ca3e6b163487de3c11847b50e48230b45"
+ - "0x68a1b2756b41ce837d73a801e18a06e13eac50e1"
+ - "0x76f727f55074931221fc88a188b7915084011dcf"
+ - "0x45ccce9bc8e883ef7805ea73b88d5d528c7cec55"
+ - "0xd739fb7a3b652306d00f92b20439afc637650254"
+ - "0xcbe019c9c44954d388602a99a45a1d7da61321cf"
+ - "0x491f8fbc6b9a5db31c959a702ab6a0dcbea73a48"
+ - "0xc799fe29b67599010a55ec14a8031af2a2521470"
+ - "0x787cb0d29194f0faca73884c383cf4d2501bb874"
+ - "0x833b5c0379a597351c6cd3efe246534bf3ae5f9f"
+ - "0x893da3225a2fcf13cca674d1a1bb5a2ea1f3dd14"
+ - "0x880a809ca9dc0a35f5015d31f1f2273a489695eb"
+ - "0xd632fd1d737c6db356d747d09642bef8ae453f4d"
+ - "0x00dfd5f920ccf08eb0581d605bab413d289c21b4"
+ - "0x4c3c0650ddcb767d71c91fa89ee9e5a2cd335834"
+ - "0x5df9a4be4f9d717b2bfece9ec350dcf4cbcb91d8"
+ - "0xbb406187c01cc1c9eaf9d4b5c924b7fa37aecefd"
+ - "0x7bdb0095429f8eff1efb718aabc912b2489ba5b3"
+ - "0x874712c653aaaa7cfb201317f46e00238c2649bb"
+ - "0x0aa17f21dc8977cdf0141e35543f094fb9edaece"
+ - "0xac3c14071c80819113df501e1ab767be910d5e5a"
+ - "0x44cccbbd7a5a9e2202076ea80c185da0058f1715"
+ - "0x683696523512636b46a826a7e3d1b0658e8e2e1c"
+ - "0xdef3542bb1b2969c1966dd91ebc504f4b37462fe"
+ - "0x2cfa1e69c8a8083aa52cfcf22d8caff7521e1e7e"
+ - "0x08c830f79917205ff1605325fcfbb3efc0c16cb5"
+ - "0x2d55f5558aea4c25fcc1ff78b10265755aff3856"
+ - "0xe71cbaaa6b093fce66211e6f218780685077d8b5"
+ - "0xc3b515bca486520483ef182c3128f72ce270c069"
+ - "0x42f9f9202d5f4412148662cf3bc68d704c8e354f"
+ - "0x0e927aa52a38783c1fd5dfa5c8873cbdbd01d2ca"
+ - "0x18d3e4f9951fedcddd806538857ebed2f5f423b7"
+ - "0xf36afb467d1f05541d998bbbcd5f7167d67bd8fc"
+ - "0xd2ea6612f6c7c11626f7d5d801d08b53bce52511"
\ No newline at end of file
diff --git a/config/indexer-config-monad.yaml b/config/indexer-config-monad.yaml
new file mode 100644
index 000000000..94d07ab9e
--- /dev/null
+++ b/config/indexer-config-monad.yaml
@@ -0,0 +1,4 @@
+chain_id: 10143
+
+export_transactions_and_logs_job:
+ use_receipt_from_blocks_rpc: True
\ No newline at end of file
diff --git a/config/indexer-config.yaml b/config/indexer-config.yaml
index a829315a5..187215c7f 100644
--- a/config/indexer-config.yaml
+++ b/config/indexer-config.yaml
@@ -38,3 +38,7 @@ eigen_layer_job:
address: "0x858646372cc42e1a627fce94aa7a7033e7cf075a"
DELEGATION:
address: "0x39053d51b77dc0d36036fc1fcc8cb819df8ef37a"
+
+export_aave_v2_job:
+ POOL_CONFIGURE: "0x311bb771e4f8952e6da169b425e7e92d6ac45756"
+ POOL_V2: "0x7d2768de32b0b80b7a3454c06bdac94a69ddc7a9"
diff --git a/docker-compose/docker-compose.yaml b/docker-compose/docker-compose.yaml
index 715f755af..1c6007ad3 100644
--- a/docker-compose/docker-compose.yaml
+++ b/docker-compose/docker-compose.yaml
@@ -19,9 +19,12 @@ services:
<<: *common-settings
container_name: indexer
environment:
- - AUTO_UPGRADE_DB=true
+ - INIT_SCHEMA=true
- ENTITY_TYPES=EXPLORER_BASE
- SYNC_RECORDER=pg:main_recorder
+ - RETRY_FROM_RECORD=true
+ ports:
+ - 9200:9200
hemera-uop-indexer:
<<: *common-settings
diff --git a/enumeration/entity_type.py b/enumeration/entity_type.py
deleted file mode 100644
index c510210c5..000000000
--- a/enumeration/entity_type.py
+++ /dev/null
@@ -1,215 +0,0 @@
-from enum import IntFlag
-from functools import reduce
-
-from indexer.domain.block import Block, UpdateBlockInternalCount
-from indexer.domain.block_ts_mapper import BlockTsMapper
-from indexer.domain.contract import Contract
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.domain.current_token_balance import CurrentTokenBalance
-from indexer.domain.log import Log
-from indexer.domain.token import *
-from indexer.domain.token_balance import TokenBalance
-from indexer.domain.token_id_infos import *
-from indexer.domain.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
-from indexer.domain.trace import Trace
-from indexer.domain.transaction import Transaction
-from indexer.modules.custom.address_index.domain import *
-from indexer.modules.custom.address_index.domain.address_contract_operation import AddressContractOperation
-from indexer.modules.custom.address_index.domain.address_internal_transaction import AddressInternalTransaction
-from indexer.modules.custom.address_index.domain.address_nft_1155_holders import AddressNft1155Holder
-from indexer.modules.custom.all_features_value_record import AllFeatureValueRecordBlueChipHolders
-from indexer.modules.custom.blue_chip.domain.feature_blue_chip import BlueChipHolder
-from indexer.modules.custom.deposit_to_l2.domain.address_token_deposit import AddressTokenDeposit
-from indexer.modules.custom.deposit_to_l2.domain.token_deposit_transaction import TokenDepositTransaction
-from indexer.modules.custom.eigen_layer.domains.eigen_layer_domain import EigenLayerAction, EigenLayerAddressCurrent
-from indexer.modules.custom.hemera_ens.ens_domain import (
- ENSAddressChangeD,
- ENSAddressD,
- ENSMiddleD,
- ENSNameRenewD,
- ENSRegisterD,
-)
-from indexer.modules.custom.karak.karak_domain import KarakActionD, KarakAddressCurrentD, KarakVaultTokenD
-from indexer.modules.custom.opensea.domain.address_opensea_transactions import AddressOpenseaTransaction
-from indexer.modules.custom.opensea.domain.opensea_order import OpenseaOrder
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import (
- UniswapV2Erc20CurrentTotalSupply,
- UniswapV2Erc20TotalSupply,
- UniswapV2Pool,
- UniswapV2SwapEvent,
-)
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
- UniswapV3Pool,
- UniswapV3PoolCurrentPrice,
- UniswapV3PoolFromSwapEvent,
- UniswapV3PoolFromToken,
- UniswapV3PoolPrice,
- UniswapV3SwapEvent,
- UniswapV3Token,
- UniswapV3TokenCollectFee,
- UniswapV3TokenCurrentStatus,
- UniswapV3TokenDetail,
- UniswapV3TokenUpdateLiquidity,
-)
-from indexer.modules.user_ops.domain.user_operations import UserOperationsResult
-
-
-class EntityType(IntFlag):
- EXPLORER_BASE = 1 << 0
- EXPLORER_TOKEN = 1 << 1
- EXPLORER_TRACE = 1 << 2
-
- BRIDGE = 1 << 3
-
- UNISWAP_V3 = 1 << 4
-
- USER_OPS = 1 << 5
-
- BLUE_CHIP = 1 << 6
-
- ADDRESS_INDEX = 1 << 7
-
- DEPOSIT_TO_L2 = 1 << 8
-
- OPEN_SEA = 1 << 9
-
- ENS = 1 << 10
-
- KARAK = 1 << 11
-
- EIGEN_LAYER = 1 << 13
-
- UNISWAP_V2 = 1 << 14
-
- EXPLORER = EXPLORER_BASE | EXPLORER_TOKEN | EXPLORER_TRACE
-
- @staticmethod
- def combine_all_entity_types():
- return reduce(lambda x, y: x | y, EntityType)
-
- @staticmethod
- def entity_filter_mode(entity_types):
- if entity_types ^ EntityType.BRIDGE == 0:
- return True
- return False
-
-
-ALL_ENTITY_COLLECTIONS = EntityType.__members__.keys()
-DEFAULT_COLLECTION = []
-
-
-def calculate_entity_value(entity_types):
- if entity_types is None or entity_types == "":
- return 0
- entities = EntityType(0)
- for entity_type in [entity.strip().upper() for entity in entity_types.split(",")]:
- if entity_type in EntityType.__members__:
- entities |= EntityType[entity_type]
- else:
- available_types = ",".join(ALL_ENTITY_COLLECTIONS)
- raise ValueError(
- f"{entity_type} is not an available entity type. Supply a comma-separated list of types from {available_types}"
- )
- return entities
-
-
-def generate_output_types(entity_types):
- if entity_types & EntityType.EXPLORER_BASE:
- yield Block
- yield BlockTsMapper
- yield Transaction
- yield Log
-
- if entity_types & EntityType.EXPLORER_TOKEN:
- yield Token
- yield UpdateToken
- yield ERC20TokenTransfer
- yield ERC721TokenTransfer
- yield ERC1155TokenTransfer
- yield TokenBalance
- yield CurrentTokenBalance
-
- yield UpdateERC1155TokenIdDetail
- yield ERC1155TokenIdDetail
- yield UpdateERC721TokenIdDetail
- yield ERC721TokenIdDetail
- yield ERC721TokenIdChange
-
- if entity_types & EntityType.EXPLORER_TRACE:
- yield Trace
- yield Contract
- # yield CoinBalance
- yield ContractInternalTransaction
- yield UpdateBlockInternalCount
-
- if entity_types & EntityType.UNISWAP_V3:
- yield UniswapV3Pool
- yield UniswapV3PoolPrice
- yield UniswapV3PoolCurrentPrice
- yield UniswapV3SwapEvent
- yield UniswapV3PoolFromSwapEvent
- yield UniswapV3Token
- yield UniswapV3TokenDetail
- yield UniswapV3TokenCurrentStatus
- yield UniswapV3PoolFromToken
-
- if entity_types & EntityType.USER_OPS:
- yield UserOperationsResult
-
- if entity_types & EntityType.ADDRESS_INDEX:
- yield Block
- yield Transaction
- yield Log
- yield Token
- yield ERC20TokenTransfer
- yield ERC721TokenTransfer
- yield ERC1155TokenTransfer
- yield AddressNftTransfer
- yield AddressTokenHolder
- yield AddressTokenTransfer
- yield TokenAddressNftInventory
- yield AddressTransaction
- yield AddressNft1155Holder
- yield AddressContractOperation
- yield AddressInternalTransaction
-
- if entity_types & EntityType.BLUE_CHIP:
- yield Block
- yield Transaction
- yield ERC721TokenTransfer
- yield Token
- yield UpdateToken
- yield TokenBalance
- yield CurrentTokenBalance
- yield AllFeatureValueRecordBlueChipHolders
- yield BlueChipHolder
-
- if entity_types & EntityType.DEPOSIT_TO_L2:
- yield TokenDepositTransaction
- yield AddressTokenDeposit
-
- if entity_types & EntityType.ENS:
- yield ENSMiddleD
- yield ENSRegisterD
- yield ENSNameRenewD
- yield ENSAddressChangeD
- yield ENSAddressD
-
- if entity_types & EntityType.OPEN_SEA:
- yield AddressOpenseaTransaction
- yield OpenseaOrder
-
- if entity_types & EntityType.KARAK:
- yield KarakActionD
- yield KarakVaultTokenD
- yield KarakAddressCurrentD
-
- if entity_types & EntityType.EIGEN_LAYER:
- yield EigenLayerAction
- yield EigenLayerAddressCurrent
-
- if entity_types & EntityType.UNISWAP_V2:
- yield UniswapV2Pool
- yield UniswapV2SwapEvent
- yield UniswapV2Erc20TotalSupply
- yield UniswapV2Erc20CurrentTotalSupply
diff --git a/hemera.py b/hemera.py
index 229e34827..4250e3b50 100644
--- a/hemera.py
+++ b/hemera.py
@@ -1,3 +1,3 @@
-from cli import cli
+from hemera.cli import cli
cli()
diff --git a/hemera/README.md b/hemera/README.md
new file mode 100644
index 000000000..9245777a8
--- /dev/null
+++ b/hemera/README.md
@@ -0,0 +1,532 @@
+
+# Hemera Indexer
+By Hemera Protocol
+
+
+[CI](https://github.com/HemeraProtocol/hemera-indexer/actions)
+[Codecov](https://codecov.io/gh/HemeraProtocol/hemera-indexer)
+[License: Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0.txt)
+[Code style: black](https://github.com/psf/black)
+[X](https://x.com/HemeraProtocol)
+[Telegram](https://t.me/+7OHzn5kvcCdjZGNl)
+[Discord](https://discord.com/invite/socialscan)
+
+> [!NOTE]
+> The Hemera Indexer is a work-in-progress project. If you need to use it in production, please consult the Hemera Team first.
+
+## About Hemera Protocol
+
+Hemera Protocol is a decentralized, account-centric programmable indexing network created to function as a public goods data infrastructure, enhancing the capabilities of data networks in web3. This platform supports many web3 applications, from straightforward to intricate, such as blockchain explorers, on-chain asset portfolios, social graphs, zero-knowledge (ZK) coprocessors, community quality auditing, and web3 identities. All these can benefit from or be built on top of Hemera.
+
+## About Hemera Indexer
+
+As the foundation of the Hemera Protocol, the blockchain indexer plays a crucial role. It is the primary component that enables efficient and organized access to blockchain data.
+Initially inspired by open-source projects like Ethereum ETL, we extended their capabilities as the Ethereum ecosystem evolved with the emergence of more Layer 2 chains and new ERC standards. Recognizing the need for a robust solution, we decided to develop our own indexer as the first step in building the Hemera Protocol Network.
+As of July 5, 2024, the initial open-source version of the Hemera Indexer offers comprehensive functionality: it can index any EVM-compatible chain and provides all the data needed for a basic blockchain explorer. In the coming weeks, we plan to incorporate additional features from our in-house version into the open-source version.
+
+## Features Offered
+
+#### Exportable Entities
+
+The system can export the following entities:
+
+- Blocks
+- Transactions
+- Logs
+- ERC20 / ERC721 / ERC1155 tokens
+- ERC20 / ERC721 / ERC1155 Token transfers
+- ERC20 / ERC721 / ERC1155 Token balances
+- Contracts
+- Traces / Internal transactions
+- L1 -> L2 Transactions
+- L2 -> L1 Transactions
+- Rollup Batches
+- DA Transactions
+- User Operations
+
+#### Supported Export Formats
+
+The data can be exported into the following formats:
+
+- PostgreSQL
+- JSONL
+- CSV
+
+#### Output Types and Entity Types Explanation
+
+##### Entity Types
+
+Entity Types are high-level categories that group related data models. They are defined in the `EntityType` enum and can be combined using bitwise operations.
+
+##### Key Points:
+- Specified using the `-E` or `--entity-types` option
+- Examples: EXPLORER_BASE, EXPLORER_TOKEN, EXPLORER_TRACE, etc.
+- Multiple types can be combined using commas
+
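+For illustration, here is a minimal sketch of how a flag-style enum composes under bitwise operations. The member names follow the examples above, but the real `EntityType` definition in the codebase may differ:
+
+```python
+from enum import IntFlag
+
+class EntityType(IntFlag):
+    # Illustrative members; the actual enum defines many more.
+    EXPLORER_BASE = 1 << 0
+    EXPLORER_TOKEN = 1 << 1
+    EXPLORER_TRACE = 1 << 2
+
+# "-E EXPLORER_BASE,EXPLORER_TOKEN" resolves to a bitwise union:
+entity_types = EntityType.EXPLORER_BASE | EntityType.EXPLORER_TOKEN
+
+# Membership checks use bitwise AND:
+assert entity_types & EntityType.EXPLORER_TOKEN
+assert not (entity_types & EntityType.EXPLORER_TRACE)
+```
+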
+##### Output Types
+
+Output Types correspond to more detailed data models and are typically associated with specific Entity Types.
+
+##### Key Points:
+- Specified using the `-O` or `--output-types` option
+- Examples: Block, Transaction, Log, Token, AddressTokenBalance, etc.
+- Takes precedence over Entity Types if specified
+- Directly corresponds to data class names in the code (Domain)
+
+##### Relationship between Entity Types and Output Types
+
+1. Entity Types are used to generate a set of Output Types:
+ - The `generate_output_types` function maps Entity Types to their corresponding Output Types.
+ - Each Entity Type yields a set of related data classes (Output Types).
+
+2. When specifying Output Types directly:
+ - It overrides the Entity Type selection.
+ - Allows for more granular control over the exported data.
+
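+Building on the `EntityType` sketch above, here is a hedged sketch of that mapping; the dataclasses below are placeholders, while the real `generate_output_types` yields the indexer's domain classes:
+
+```python
+from dataclasses import dataclass
+
+@dataclass
+class Block: ...
+
+@dataclass
+class Transaction: ...
+
+@dataclass
+class Token: ...
+
+def generate_output_types(entity_types: EntityType):
+    # Each selected entity type yields its related output dataclasses.
+    if entity_types & EntityType.EXPLORER_BASE:
+        yield Block
+        yield Transaction
+    if entity_types & EntityType.EXPLORER_TOKEN:
+        yield Token
+
+# EXPLORER_BASE alone selects only the base dataclasses:
+assert set(generate_output_types(EntityType.EXPLORER_BASE)) == {Block, Transaction}
+```
+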
+#### Output Types and Data Classes
+
+It's important to note that when using the `--output-types` option, you should specify the names that directly correspond to the data class names in the code. For example:
+
+```
+--output-types Block,Transaction,Log,Token,ERC20TokenTransfer
+```
+
+These names should match exactly with the data class definitions in your codebase. The Output Types are essentially the same as the data class names, allowing for precise selection of the data models you wish to export.
+
+#### Usage Examples
+
+1. Using Entity Types:
+ ```
+ --entity-types EXPLORER_BASE,EXPLORER_TOKEN
+ ```
+ This will generate Output Types including Block, Transaction, Log, Token, ERC20TokenTransfer, etc.
+
+2. Using Output Types:
+ ```
+ --output-types Block,Transaction,Token
+ ```
+ This will only generate the specified Output Types, regardless of Entity Types.
+
+#### Note
+
+When developing or using this system, consider the following:
+- Entity Types provide a broader, category-based selection of data.
+- Output Types offer more precise control over the exact data models to be exported.
+- The choice between using Entity Types or Output Types depends on the specific requirements of the data export task.
+
+#### Additional features
+
+- Ability to select arbitrary block ranges for more flexible data indexing
+- Option to choose any entities for targeted data extraction
+- Automated reorg detection process to ensure data consistency and integrity
+
+## Contents
+
+
+- [Install and Run Hemera Indexer](#install-and-run-hemera-indexer)
+ - [Prerequisites](#prerequisites)
+ - [Hardware Requirements](#hardware-requirements)
+ - [Run Hemera Indexer](#run-hemera-indexer)
+ - [Run In Docker](#run-in-docker)
+ - [Run From Source Code](#run-from-source-code)
+- [Configure Hemera Indexer](#configure-hemera-indexer)
+ - [Basic Concepts](#basic-concepts)
+ - [Parameters](#parameters)
+ - [Export Result](#export-result)
+
+
+## Install and Run Hemera Indexer
+
+### Prerequisites
+
+- VM Instance (or your local machine)
+- RPC Node of your EVM-compatible blockchain
+
+### Hardware Requirements
+
+We recommend the following configuration to run Hemera Indexer:
+
+- A 4-core CPU
+- At least 8 GB of RAM
+- An SSD with sufficient free space (see [Disk Usage](#disk-usage) below)
+
+#### Disk Usage
+
+Based on 2024 Ethereum mainnet data, every 10k blocks (approximately 1.5 million transactions) consume disk space as follows:
+
+- 9 GB for the PostgreSQL database
+- 9.3 GB for CSV files, if you opt for the CSV export
+- 15 GB for JSON files, if you opt for the JSON export
+
+That adds up to roughly 33 GB per 10k blocks with all three outputs enabled. We recommend reserving at least 150 GB of disk space for every 100k Ethereum blocks.
+
+#### Use VM From Cloud Services
+
+If you don't have a VM in place, you can create VMs from cloud providers.
+[Create an AWS EC2 Instance](docs/AWS.md)
+
+#### RPC Usage
+
+The indexer issues a large number of RPC requests, so make sure you have a robust and fast RPC endpoint. Most of the time, the RPC endpoint will be the bottleneck for the indexer.
+
+### Clone the Repository
+
+```bash
+git clone https://github.com/HemeraProtocol/hemera-indexer.git
+# or, via SSH
+git clone git@github.com:HemeraProtocol/hemera-indexer.git
+```
+
+### Run Hemera Indexer
+
+We recommend running Hemera Indexer in Docker containers using the provided `docker-compose.yaml`.
+If you prefer running from source code, please check out [Run From Source Code](#run-from-source-code).
+
+### Run In Docker
+
+#### Install Docker & Docker Compose
+
+If you have trouble running the following commands, consider referring to
+the [official docker installation guide](https://docs.docker.com/engine/install/ubuntu/#install-using-the-repository)
+for the latest instructions.
+
+##### Ubuntu and Debian
+```bash
+# Add Docker's official GPG key:
+sudo apt-get update
+sudo apt-get install ca-certificates curl
+sudo install -m 0755 -d /etc/apt/keyrings
+sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
+sudo chmod a+r /etc/apt/keyrings/docker.asc
+
+# Add the repository to Apt sources:
+echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
+ $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
+ sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+sudo apt-get update
+
+# Install docker and docker compose
+sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
+docker compose version
+```
+
+##### RPM-based distros
+```bash
+sudo yum update -y
+sudo yum install docker -y
+sudo service docker start
+sudo systemctl enable docker
+sudo usermod -a -G docker ec2-user
+
+newgrp docker
+docker --version
+docker run hello-world
+
+DOCKER_CONFIG=${DOCKER_CONFIG:-$HOME/.docker}
+mkdir -p $DOCKER_CONFIG/cli-plugins
+curl -SL https://github.com/docker/compose/releases/download/v2.29.6/docker-compose-linux-x86_64 -o $DOCKER_CONFIG/cli-plugins/docker-compose
+chmod +x $DOCKER_CONFIG/cli-plugins/docker-compose
+docker compose version
+```
+
+#### Run the Docker Compose
+
+```bash
+cd hemera-indexer
+cd docker-compose
+```
+
+Before starting the containers, you may want to edit the environment variables in `docker-compose.yaml`. Please check
+out the [configuration manual](#configure-hemera-indexer) for how to configure them.
+
+```bash
+vim docker-compose.yaml
+```
+
+Now, run the following command to spin up the containers.
+
+```bash
+sudo docker compose up
+```
+
+You should see logs similar to the following in your console, indicating that Hemera Indexer is running properly.
+
+```
+[+] Running 5/0
+ ✔ Container redis Created 0.0s
+ ✔ Container postgresql Created 0.0s
+ ✔ Container indexer Created 0.0s
+ ✔ Container indexer-trace Created 0.0s
+ ✔ Container hemera-api Created 0.0s
+Attaching to hemera-api, indexer, indexer-trace, postgresql, redis
+```
+
+### Run From Source Code
+
+#### Install developer tools
+
+Skip this step if you already have `make` installed.
+
+```bash
+sudo apt update
+sudo apt install make
+```
+
+#### Run development
+
+To set up your development environment, simply run:
+
+```bash
+make development
+```
+
+This command will:
+1. Create a Python virtual environment
+2. Activate the virtual environment
+3. Install necessary system packages
+4. Install Python dependencies
+
+After running this command, your environment will be set up and ready to use.
+
+Remember to activate the virtual environment (`source ./venv/bin/activate`) when you want to work on your project in the future.
+
+```bash
+source ./venv/bin/activate
+```
+
+
+#### Prepare Your PostgreSQL Instance
+
+Hemera Indexer requires a PostgreSQL database to store all indexed data. You may skip this step if you already have a PostgreSQL instance set up.
+
+##### Setup PostgreSQL
+
+Follow the instructions about how to set up a PostgreSQL database here: [Setup PostgreSQL on Ubuntu](https://www.cherryservers.com/blog/how-to-install-and-setup-postgresql-server-on-ubuntu-20-04).
+
+##### Configure
+
+Configure the `OUTPUT` or `--output` parameter according to your PostgreSQL role information. Check out [Configure Hemera Indexer](#output-or---output) for details.
+
+E.g. `postgresql://${YOUR_USER}:${YOUR_PASSWORD}@${YOUR_HOST}:5432/${YOUR_DATABASE}`.
+
+#### Run
+
+Please check out [Configure Hemera Indexer](#configure-hemera-indexer) on how to configure the indexer.
+
+```bash
+# Traces take much more time to index; alternatively, you can spin up a separate
+# process for them with --entity-types trace,contract,coin_balance.
+python hemera.py stream \
+ --provider-uri https://ethereum.publicnode.com \
+ --postgres-url postgresql://devuser:devpassword@localhost:5432/hemera_indexer \
+ --output jsonfile://output/eth_blocks_20000001_20010000/json,csvfile://output/hemera_indexer/csv,postgresql://devuser:devpassword@localhost:5432/eth_blocks_20000001_20010000 \
+ --start-block 20000001 \
+ --end-block 20010000 \
+ --entity-types EXPLORER_BASE \
+ --block-batch-size 200 \
+ --batch-size 200 \
+ --max-workers 32
+```
+
+Once you have successfully bootstrapped Hemera Indexer, you should see logs similar to the ones below.
+
+```bash
+2024-06-25 16:37:38,456 - root [INFO] - Using provider https://eth.llamarpc.com
+2024-06-25 16:37:38,456 - root [INFO] - Using debug provider https://eth.llamarpc.com
+2024-06-25 16:37:38,485 - alembic.runtime.migration [INFO] - Context impl PostgresqlImpl.
+2024-06-25 16:37:38,485 - alembic.runtime.migration [INFO] - Will assume transactional DDL.
+2024-06-25 16:37:38,502 - alembic.runtime.migration [INFO] - Context impl PostgresqlImpl.
+2024-06-25 16:37:38,502 - alembic.runtime.migration [INFO] - Will assume transactional DDL.
+2024-06-25 16:37:39,485 - root [INFO] - Current block 20167548, target block 20137200, last synced block 20137199, blocks to sync 1
+2024-06-25 16:37:39,486 - ProgressLogger [INFO] - Started work. Items to process: 1.
+2024-06-25 16:37:40,267 - ProgressLogger [INFO] - 1 items processed. Progress is 100%.
+2024-06-25 16:37:40,268 - ProgressLogger [INFO] - Finished work. Total items processed: 1. Took 0:00:00.782177.
+2024-06-25 16:37:40,283 - exporters.postgres_item_exporter [INFO] - Exporting items to table block_ts_mapper, blocks end, Item count: 2, Took 0:00:00.014799
+2024-06-25 16:37:40,283 - ProgressLogger [INFO] - Started work.
+
+```
+
+### Export Result
+
+Hemera Indexer allows you to export the blockchain data to a database, or to JSON/CSV files.
+
+### Export From PostgreSQL Database
+
+#### Connect to Your Postgresql Instance
+
+Use any PostgreSQL client to connect to your PostgreSQL instance; make sure the `user`, `password`, and `port` are the same as in your configuration.
+
+#### Run In Docker
+
+By default, the PostgreSQL port is exposed and mapped to port 5432 of your EC2 instance. You can verify or change it in the PostgreSQL section of `docker-compose.yaml`.
+
+#### Configure Your Network
+
+If you are using any cloud services, make sure the PostgreSQL port is accessible by updating the network rules.
+
+If you are using AWS and EC2, you can check out [this post](https://www.intelligentdiscovery.io/controls/ec2/aws-ec2-postgresql-open) on how to configure the security group.
+
+### Export To Output Files
+
+#### Run In Docker
+
+By default, the `docker-compose.yaml` mounts the `output` folder to `docker-compose/output`, assuming that you are running from the `docker-compose` folder.
+You can find exported results in `docker-compose/output`.
+
+#### Run From Source Code
+
+The database and exported file locations are the same as configured in the `OUTPUT` or `--output` parameter.
+
+E.g., if you specify the `OUTPUT` or `--output` parameter as below:
+
+```bash
+# Command line parameter
+python hemera.py stream \
+ --provider-uri https://ethereum.publicnode.com \
+ --postgres-url postgresql://devuser:devpassword@localhost:5432/hemera_indexer \
+ --output jsonfile://output/eth_blocks_20000001_20010000/json,csvfile://output/hemera_indexer/csv,postgresql://devuser:devpassword@localhost:5432/eth_blocks_20000001_20010000 \
+ --start-block 20000001 \
+ --end-block 20010000 \
+ --entity-types EXPLORER_BASE \
+ --block-batch-size 200 \
+ --batch-size 200 \
+ --max-workers 32
+
+# Or using environment variable
+export OUTPUT=postgresql://user:password@localhost:5432/hemera_indexer,jsonfile://output/json,csvfile://output/csv
+```
+
+You will be able to find those results in the `output` folder of your current location.
+
+## Basic Concepts
+
+Here are some important concepts in Hemera Indexer:
+
+### Dataclass
+Dataclasses are extensively utilized during the indexing process. All outputs from Hemera Indexer are defined as dataclasses. Each indexing job depends on certain dataclasses to run and generates one or more dataclasses as the output.
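+
+For a concrete (hypothetical) flavor, an output dataclass might look like the sketch below; the field names are illustrative, not the exact schema of Hemera's domain classes:
+
+```python
+from dataclasses import dataclass
+
+@dataclass
+class Block:
+    number: int
+    hash: str
+    timestamp: int
+```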
+
+### Job
+A job is an indexing task that has input (dependency) dataclasses and output dataclasses. You can define one or more jobs to run during the indexing process. The Hemera Indexer will automatically determine the job dependencies and decide the order in which the jobs will run.
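+
+As a hedged illustration of this dependency-driven ordering, the sketch below wires made-up jobs together by the dataclasses they consume and produce; the actual scheduler in Hemera may work differently:
+
+```python
+from graphlib import TopologicalSorter  # Python 3.9+
+
+# Each (made-up) job declares the dataclasses it needs and the ones it makes.
+jobs = {
+    "export_blocks": {"needs": set(), "makes": {"Block", "Transaction"}},
+    "export_logs": {"needs": {"Transaction"}, "makes": {"Log"}},
+    "export_tokens": {"needs": {"Log"}, "makes": {"Token"}},
+}
+
+# Map each produced dataclass to its producing job, then hand
+# TopologicalSorter every job's set of predecessor jobs.
+producer = {out: name for name, job in jobs.items() for out in job["makes"]}
+graph = {name: {producer[d] for d in job["needs"]} for name, job in jobs.items()}
+
+for job_name in TopologicalSorter(graph).static_order():
+    print("run", job_name)  # export_blocks, export_logs, export_tokens
+```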
+
+### Entity
+An Entity Type is a higher-level aggregation of desired output. For example, EXPLORER_BASE refers to basic explorer data, including blocks, transactions, and logs. By specifying an entity type, you can easily set up the indexer for your use case.
+
+## Configure Hemera Indexer
+
+Hemera Indexer can read its configuration from command line arguments or environment variables.
+
+- If you run Hemera Indexer in [Docker](#run-in-docker), then the environment variable is easier to configure.
+- If you prefer running from [Source Code](#run-from-source-code), command line arguments are more intuitive.
+
+Run `python hemera.py stream --help` for the latest documentation of all arguments.
+
+### Parameters
+
+- If the name of the parameter is in `UPPER_CASE`, it's an environment variable.
+- If the name of the parameter starts with `--`, it's a command line argument.
+
+Avoid specifying the same parameter via both an environment variable and a command line argument.
+
+#### `PROVIDER_URI` or `--provider-uri` or `-p`
+
+[**Default**: `https://ethereum-rpc.publicnode.com`]
+The URI of the web3 RPC provider, e.g. `file://$HOME/Library/Ethereum/geth.ipc` or `https://ethereum-rpc.publicnode.com`.
+
+#### `DEBUG_PROVIDER_URI` or `--debug-provider-uri` or `-d`
+
+[**Default**: `https://ethereum-rpc.publicnode.com`]
+The URI of the web3 debug RPC provider, e.g. `file://$HOME/Library/Ethereum/geth.ipc` or `https://ethereum-rpc.publicnode.com`.
+
+#### `POSTGRES_URL` or `--postgres-url` or `-pg`
+
+[**Required**]
+The PostgreSQL connection URL that Hemera Indexer uses to maintain its state, e.g. `postgresql://user:password@127.0.0.1:5432/postgres`.
+
+#### `OUTPUT` or `--output` or `-o`
+
+[**Required**]
+Where Hemera Indexer should export the data: a PostgreSQL database, JSON files, or CSV files. If not specified, the data will be printed to the console.
+
+If you have multiple outputs, separate them with ",".
+The file locations are relative to your current directory if you run from source code, or to the `output` folder as configured in `docker-compose.yaml`.
+
+e.g.
+
+- `postgresql://user:password@localhost:5432/hemera_indexer`: data will be exported to your PostgreSQL database.
+- `jsonfile://output/json`: JSON files will be exported to the folder `output/json`.
+- `csvfile://output/csv`: CSV files will be exported to the folder `output/csv`.
+- `console,jsonfile://output/json,csvfile://output/csv`: multiple destinations are supported.
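+
+As a rough sketch of how a multi-destination value can be split into per-scheme exporters (the parsing below is illustrative, not Hemera's actual exporter factory):
+
+```python
+def parse_output_targets(output: str):
+    """Split a comma-separated --output value into (scheme, target) pairs."""
+    for target in output.split(","):
+        target = target.strip()
+        # "console" has no "://" and maps to itself.
+        scheme = target.split("://", 1)[0]
+        yield scheme, target
+
+for scheme, target in parse_output_targets(
+    "console,jsonfile://output/json,csvfile://output/csv"
+):
+    print(scheme, "->", target)
+```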
+
+#### `ENTITY_TYPES` or `--entity-types` or `-E`
+
+[**Default**: `EXPLORER_BASE`]
+The list of entity types to export. e.g. `EXPLORER_BASE`, `EXPLORER_TOKEN`, `EXPLORER_TRACE`.
+
+#### `OUTPUT_TYPES` or `--output-types` or `-O`
+
+The list of output types to export, corresponding to more detailed data models. Specifying this option overrides the entity types specified with `-E`. Available options include: Block, Transaction, Log, Token, AddressTokenBalance, etc.
+
+You may spin up multiple Hemera Indexer processes, each specifying different output types, to accelerate indexing. For example, indexing `trace` data may take much longer than other entities, so you may want to run a separate process for it. Check out `docker-compose/docker-compose.yaml` for examples.
+
+#### `DB_VERSION` or `--db-version` or `-v`
+
+[**Default**: `head`]
+The database version used to initialize the database, specified as an Alembic revision ID.
+e.g. `head` indicates the latest version,
+and `base` indicates an empty database without any tables.
+
+#### `START_BLOCK` or `--start-block` or `-s`
+
+The block number to start from, e.g. `0`, `1000`, etc.
+If you don't specify this, Hemera Indexer will read the last synced block from the PostgreSQL database and resume from it.
+
+#### `END_BLOCK` or `--end-block` or `-e`
+
+The block number to end at, e.g. `10000`, `20000`, etc.
+
+#### `BLOCKS_PER_FILE` or `--blocks-per-file`
+
+[**Default**: `1000`]
+The number of block records to write to each file.
+
+#### `PERIOD_SECONDS` or `--period-seconds`
+
+[**Default**: `10`]
+Seconds to sleep between each sync with the latest blockchain state.
+
+#### `BATCH_SIZE` or `--batch-size` or `-b`
+
+[**Default**: `10`]
+The number of non-debug RPC requests to batch into a single request.
+
+#### `DEBUG_BATCH_SIZE` or `--debug-batch-size`
+
+[**Default**: `1`]
+The number of debug RPC requests to batch into a single request.
+
+#### `BLOCK_BATCH_SIZE` or `--block-batch-size` or `-B`
+
+[**Default**: `1`]
+The number of blocks to batch in a single sync round.
+
+#### `MAX_WORKERS` or `--max-workers` or `-w`
+
+[**Default**: `5`]
+The number of workers, e.g. `4`, `5`, etc.
+
+#### `LOG_FILE` or `--log-file`
+
+The log file to use. e.g. `path/to/logfile.log`.
+
+#### `SYNC_RECORDER` or `--sync-recorder`
+
+[**Default**: `file_sync_record`]
+How to store the sync record data. The value takes the form `<storage>_<key>`: e.g. `pg_base` stores the record in PostgreSQL under the key `base`, while `file_base` stores it in a file named `base`.
+
+#### `CACHE_SERVICE` or `-cache`
+
+[**Default**: `memory`]
+The cache backend used to store intermediate data generated by the indexer. e.g. `memory` keeps the cache in process memory, while `redis` stores it in a Redis instance such as `redis://localhost:6379`.
\ No newline at end of file
diff --git a/hemera/__init__.py b/hemera/__init__.py
new file mode 100644
index 000000000..6e45a91db
--- /dev/null
+++ b/hemera/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.0.0a1"
diff --git a/api/__init__.py b/hemera/api/__init__.py
similarity index 100%
rename from api/__init__.py
rename to hemera/api/__init__.py
diff --git a/api/app/__init__.py b/hemera/api/app/__init__.py
similarity index 100%
rename from api/app/__init__.py
rename to hemera/api/app/__init__.py
diff --git a/api/app/address/__init__.py b/hemera/api/app/address/__init__.py
similarity index 100%
rename from api/app/address/__init__.py
rename to hemera/api/app/address/__init__.py
diff --git a/api/app/address/features.py b/hemera/api/app/address/features.py
similarity index 100%
rename from api/app/address/features.py
rename to hemera/api/app/address/features.py
diff --git a/api/app/address/models.py b/hemera/api/app/address/models.py
similarity index 97%
rename from api/app/address/models.py
rename to hemera/api/app/address/models.py
index b5b2d64e7..bf8d5967e 100644
--- a/api/app/address/models.py
+++ b/hemera/api/app/address/models.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, Integer
from sqlalchemy.dialects.postgresql import BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class AddressBaseProfile(HemeraModel):
diff --git a/api/app/address/routes.py b/hemera/api/app/address/routes.py
similarity index 94%
rename from api/app/address/routes.py
rename to hemera/api/app/address/routes.py
index ca38604da..9f79a1700 100644
--- a/api/app/address/routes.py
+++ b/hemera/api/app/address/routes.py
@@ -3,9 +3,9 @@
from flask import request
from flask_restx import Resource
-from api.app.address import address_features_namespace
-from api.app.address.features import feature_registry
-from api.app.main import app
+from hemera.api.app.address import address_features_namespace
+from hemera.api.app.address.features import feature_registry
+from hemera.api.app.main import app
PAGE_SIZE = 10
MAX_TRANSACTION = 500000
diff --git a/hemera/api/app/api.py b/hemera/api/app/api.py
new file mode 100644
index 000000000..4b19074eb
--- /dev/null
+++ b/hemera/api/app/api.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
+from flask_restx import Api
+
+from hemera.api.app.address.routes import address_features_namespace
+from hemera.api.app.contract.routes import contract_namespace
+from hemera.api.app.explorer.routes import explorer_namespace
+from hemera.api.app.l2_explorer.routes import l2_explorer_namespace
+from hemera.api.app.user_operation.routes import user_operation_namespace
+from hemera_udf.address_index.endpoint.routes import address_profile_namespace
+
+# Keep the `*` imports below so each module's namespace registry gets processed
+from hemera_udf.deposit_to_l2.endpoint.routes import *
+from hemera_udf.eigen_layer.endpoint.routes import *
+from hemera_udf.hemera_ens.endpoint.routes import *
+from hemera_udf.init_capital.endpoints.routes import *
+from hemera_udf.merchant_moe.endpoints.routes import merchant_moe_namespace
+from hemera_udf.opensea.endpoint.routes import *
+from hemera_udf.staking_fbtc.endpoints.routes import staking_namespace
+from hemera_udf.uniswap_v3.endpoints.routes import *
+
+api = Api()
+
+api.add_namespace(explorer_namespace)
+api.add_namespace(opensea_namespace)
+api.add_namespace(contract_namespace)
+api.add_namespace(uniswap_v3_namespace)
+api.add_namespace(token_deposit_namespace)
+api.add_namespace(user_operation_namespace)
+api.add_namespace(staking_namespace)
+api.add_namespace(merchant_moe_namespace)
+
+api.add_namespace(l2_explorer_namespace)
+api.add_namespace(af_ens_namespace)
+api.add_namespace(address_profile_namespace)
+
+api.add_namespace(address_features_namespace)
+api.add_namespace(init_capital_namespace)
diff --git a/api/app/cache.py b/hemera/api/app/cache.py
similarity index 95%
rename from api/app/cache.py
rename to hemera/api/app/cache.py
index f1cac5cd4..8dea03404 100644
--- a/api/app/cache.py
+++ b/hemera/api/app/cache.py
@@ -4,7 +4,7 @@
import redis
from flask_caching import Cache
-from common.utils.config import get_config
+from hemera.common.utils.config import get_config
app_config = get_config()
# Use cache
diff --git a/api/app/config.py b/hemera/api/app/config.py
similarity index 100%
rename from api/app/config.py
rename to hemera/api/app/config.py
diff --git a/api/app/contract/__init__.py b/hemera/api/app/contract/__init__.py
similarity index 100%
rename from api/app/contract/__init__.py
rename to hemera/api/app/contract/__init__.py
diff --git a/api/app/contract/contract_verify.py b/hemera/api/app/contract/contract_verify.py
similarity index 96%
rename from api/app/contract/contract_verify.py
rename to hemera/api/app/contract/contract_verify.py
index 2357743e9..b1223cc20 100644
--- a/api/app/contract/contract_verify.py
+++ b/hemera/api/app/contract/contract_verify.py
@@ -3,12 +3,12 @@
import requests
-from api.app.utils.web3_utils import get_code, get_storage_at, w3
-from common.models import db
-from common.models.contracts import Contracts
-from common.utils.config import get_config
-from common.utils.exception_control import APIError
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.api.app.utils.web3_utils import get_code, get_storage_at, w3
+from hemera.common.models import db
+from hemera.common.models.contracts import Contracts
+from hemera.common.utils.config import get_config
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
config = get_config()
diff --git a/api/app/contract/routes.py b/hemera/api/app/contract/routes.py
similarity index 89%
rename from api/app/contract/routes.py
rename to hemera/api/app/contract/routes.py
index bef5f56c9..412117574 100644
--- a/api/app/contract/routes.py
+++ b/hemera/api/app/contract/routes.py
@@ -1,9 +1,9 @@
import flask
from flask_restx import Resource
-from api.app.cache import cache
-from api.app.contract import contract_namespace
-from api.app.contract.contract_verify import (
+from hemera.api.app.cache import cache
+from hemera.api.app.contract import contract_namespace
+from hemera.api.app.contract.contract_verify import (
check_contract_verification_status,
command_normal_contract_data,
get_abi_by_chain_id_address,
@@ -19,18 +19,19 @@
send_sync_verification_request,
validate_input,
)
-from api.app.limiter import limiter
-from common.models import db as postgres_db
-from common.models.contracts import Contracts
-from common.utils.exception_control import APIError
-from common.utils.format_utils import as_dict, hex_str_to_bytes
-from common.utils.web3_utils import ZERO_ADDRESS
+from hemera.api.app.limiter import limiter
+from hemera.common.models import db as postgres_db
+from hemera.common.models.contracts import Contracts
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import as_dict, hex_str_to_bytes
+from hemera.common.utils.web3_utils import ZERO_ADDRESS, is_eth_address
@contract_namespace.route("/v1/explorer/verify_contract/verify")
@contract_namespace.route("/v2/explorer/verify_contract/verify")
class ExplorerVerifyContract(Resource):
def post(_):
+ wallet_address = "0x8f72840be9414436da8a76ff08a1f6924f0efb83"
request_form = flask.request.form
address = request_form.get("address", "").lower()
compiler_type = request_form.get("compiler_type")
@@ -49,12 +50,11 @@ def post(_):
contracts = get_contract_by_address(address)
check_contract_verification_status(contracts)
-
creation_code, deployed_code = get_creation_or_deployed_code(contracts)
payload = {
"address": address,
- "wallet_address": ZERO_ADDRESS,
+ "wallet_address": wallet_address,
"compiler_type": compiler_type,
"compiler_version": compiler_version,
"evm_version": evm_version,
@@ -171,7 +171,7 @@ class ExplorerVerifyContract(Resource):
def post(self):
request_body = flask.request.json
proxy_contract_address = request_body.get("proxy_contract_address")
- if not proxy_contract_address:
+ if not proxy_contract_address or not is_eth_address(proxy_contract_address):
raise APIError("Please sent correct proxy contract address")
implementation_address = get_implementation_contract(proxy_contract_address)
@@ -204,11 +204,17 @@ def post(self):
proxy_contract_address = request_body.get("proxy_contract_address")
implementation_contract_address = request_body.get("implementation_contract_address")
- if not proxy_contract_address or not implementation_contract_address:
+ if (
+ not proxy_contract_address
+ or not implementation_contract_address
+ or not is_eth_address(proxy_contract_address)
+ or not is_eth_address(implementation_contract_address)
+ ):
raise APIError("Not such proxy contract address", code=400)
- contract = Contracts.query.filter(Contracts.address == proxy_contract_address.lower()).first()
- contract.verified_implementation_contract = implementation_contract_address.lower()
+ contract = Contracts.query.filter(Contracts.address == hex_str_to_bytes(proxy_contract_address.lower())).first()
+ contract.verified_implementation_contract = hex_str_to_bytes(implementation_contract_address.lower())
+ contract.is_verified = True
postgres_db.session.add(contract)
postgres_db.session.commit()
@@ -226,6 +232,7 @@ def get(self):
@limiter.limit("10 per minute")
def post(self):
+ wallet_address = "0x8f72840be9414436da8a76ff08a1f6924f0efb83"
request_form = flask.request.form
action = request_form.get("action")
module = request_form.get("module")
@@ -257,7 +264,6 @@ def post(self):
contracts = get_contract_by_address(address)
if contracts.is_verified:
return {"message": "This contract is verified", "status": "0"}, 200
-
creation_code, deployed_code = get_creation_or_deployed_code(contracts)
payload = {
"address": address,
@@ -269,6 +275,7 @@ def post(self):
"optimization_runs": optimization_runs,
"input_str": input_str,
"constructor_arguments": constructor_arguments,
+ "wallet_address": wallet_address,
"creation_code": creation_code,
"deployed_code": deployed_code,
}
diff --git a/api/app/db_service/__init__.py b/hemera/api/app/db_service/__init__.py
similarity index 100%
rename from api/app/db_service/__init__.py
rename to hemera/api/app/db_service/__init__.py
diff --git a/api/app/db_service/af_token_deposit.py b/hemera/api/app/db_service/af_token_deposit.py
similarity index 86%
rename from api/app/db_service/af_token_deposit.py
rename to hemera/api/app/db_service/af_token_deposit.py
index 2774e1df1..8ed1d2eb8 100644
--- a/api/app/db_service/af_token_deposit.py
+++ b/hemera/api/app/db_service/af_token_deposit.py
@@ -1,8 +1,8 @@
-from common.models import db
-from common.utils.db_utils import build_entities
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.modules.custom.deposit_to_l2.models.af_token_deposits__transactions import AFTokenDepositsTransactions
-from indexer.modules.custom.deposit_to_l2.models.af_token_deposits_current import AFTokenDepositsCurrent
+from hemera.common.models import db
+from hemera.common.utils.db_utils import build_entities
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera_udf.deposit_to_l2.models.af_token_deposits__transactions import AFTokenDepositsTransactions
+from hemera_udf.deposit_to_l2.models.af_token_deposits_current import AFTokenDepositsCurrent
def get_transactions_by_condition(filter_condition=None, columns="*", limit=None, offset=None):
diff --git a/api/app/db_service/blocks.py b/hemera/api/app/db_service/blocks.py
similarity index 86%
rename from api/app/db_service/blocks.py
rename to hemera/api/app/db_service/blocks.py
index 0cf205bd2..48d09036f 100644
--- a/api/app/db_service/blocks.py
+++ b/hemera/api/app/db_service/blocks.py
@@ -1,7 +1,7 @@
-from common.models import db
-from common.models.blocks import Blocks
-from common.utils.db_utils import build_entities
-from common.utils.format_utils import hex_str_to_bytes
+from hemera.common.models import db
+from hemera.common.models.blocks import Blocks
+from hemera.common.utils.db_utils import build_entities
+from hemera.common.utils.format_utils import hex_str_to_bytes
def get_last_block(columns="*"):
diff --git a/api/app/db_service/contract_internal_transactions.py b/hemera/api/app/db_service/contract_internal_transactions.py
similarity index 85%
rename from api/app/db_service/contract_internal_transactions.py
rename to hemera/api/app/db_service/contract_internal_transactions.py
index 68e0a0907..2bdb2c16c 100644
--- a/api/app/db_service/contract_internal_transactions.py
+++ b/hemera/api/app/db_service/contract_internal_transactions.py
@@ -1,7 +1,7 @@
-from common.models import db
-from common.models.contract_internal_transactions import ContractInternalTransactions
-from common.utils.db_utils import build_entities
-from common.utils.format_utils import hex_str_to_bytes
+from hemera.common.models import db
+from hemera.common.models.contract_internal_transactions import ContractInternalTransactions
+from hemera.common.utils.db_utils import build_entities
+from hemera.common.utils.format_utils import hex_str_to_bytes
def get_internal_transactions_by_transaction_hash(transaction_hash, columns="*"):
diff --git a/api/app/db_service/contracts.py b/hemera/api/app/db_service/contracts.py
similarity index 74%
rename from api/app/db_service/contracts.py
rename to hemera/api/app/db_service/contracts.py
index c691a7c3d..666efd669 100644
--- a/api/app/db_service/contracts.py
+++ b/hemera/api/app/db_service/contracts.py
@@ -1,7 +1,7 @@
-from common.models import db
-from common.models.contracts import Contracts
-from common.utils.db_utils import build_entities
-from common.utils.format_utils import hex_str_to_bytes
+from hemera.common.models import db
+from hemera.common.models.contracts import Contracts
+from hemera.common.utils.db_utils import build_entities
+from hemera.common.utils.format_utils import hex_str_to_bytes
def get_contract_by_address(address: str, columns="*"):
diff --git a/api/app/db_service/daily_transactions_aggregates.py b/hemera/api/app/db_service/daily_transactions_aggregates.py
similarity index 65%
rename from api/app/db_service/daily_transactions_aggregates.py
rename to hemera/api/app/db_service/daily_transactions_aggregates.py
index d47dcab46..ff2540d74 100644
--- a/api/app/db_service/daily_transactions_aggregates.py
+++ b/hemera/api/app/db_service/daily_transactions_aggregates.py
@@ -1,6 +1,6 @@
-from common.models import db
-from common.utils.db_utils import build_entities
-from indexer.modules.custom.stats.models.daily_transactions_stats import DailyTransactionsStats
+from hemera.common.models import db
+from hemera.common.utils.db_utils import build_entities
+from hemera_udf.stats.models.daily_transactions_stats import DailyTransactionsStats
def get_daily_transactions_cnt(columns="*", limit=10):
diff --git a/api/app/db_service/logs.py b/hemera/api/app/db_service/logs.py
similarity index 82%
rename from api/app/db_service/logs.py
rename to hemera/api/app/db_service/logs.py
index 9fec3976c..368d525ba 100644
--- a/api/app/db_service/logs.py
+++ b/hemera/api/app/db_service/logs.py
@@ -1,7 +1,7 @@
-from common.models import db
-from common.models.logs import Logs
-from common.models.transactions import Transactions
-from common.utils.format_utils import hex_str_to_bytes
+from hemera.common.models import db
+from hemera.common.models.logs import Logs
+from hemera.common.models.transactions import Transactions
+from hemera.common.utils.format_utils import hex_str_to_bytes
def get_logs_with_input_by_hash(hash, columns="*"):
diff --git a/api/app/db_service/tokens.py b/hemera/api/app/db_service/tokens.py
similarity index 91%
rename from api/app/db_service/tokens.py
rename to hemera/api/app/db_service/tokens.py
index 4fd627aee..9cb3974b8 100644
--- a/api/app/db_service/tokens.py
+++ b/hemera/api/app/db_service/tokens.py
@@ -1,18 +1,18 @@
from sqlalchemy import and_, func, select
-from api.app.db_service.wallet_addresses import get_token_txn_cnt_by_address
-from api.app.utils.fill_info import fill_address_display_to_transactions, fill_is_contract_to_transactions
-from common.models import db
-from common.models.erc20_token_transfers import ERC20TokenTransfers
-from common.models.erc721_token_transfers import ERC721TokenTransfers
-from common.models.erc1155_token_transfers import ERC1155TokenTransfers
-from common.models.scheduled_metadata import ScheduledMetadata
-from common.models.token_prices import TokenPrices
-from common.models.tokens import Tokens
-from common.utils.config import get_config
-from common.utils.db_utils import build_entities, get_total_row_count
-from common.utils.exception_control import APIError
-from common.utils.format_utils import as_dict, hex_str_to_bytes
+from hemera.api.app.db_service.wallet_addresses import get_token_txn_cnt_by_address
+from hemera.api.app.utils.fill_info import fill_address_display_to_transactions, fill_is_contract_to_transactions
+from hemera.common.models import db
+from hemera.common.models.erc20_token_transfers import ERC20TokenTransfers
+from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers
+from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers
+from hemera.common.models.scheduled_metadata import ScheduledMetadata
+from hemera.common.models.token_prices import TokenPrices
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.config import get_config
+from hemera.common.utils.db_utils import build_entities, get_total_row_count
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import as_dict, hex_str_to_bytes
app_config = get_config()
diff --git a/api/app/db_service/traces.py b/hemera/api/app/db_service/traces.py
similarity index 76%
rename from api/app/db_service/traces.py
rename to hemera/api/app/db_service/traces.py
index d36d603c4..421ff9cbf 100644
--- a/api/app/db_service/traces.py
+++ b/hemera/api/app/db_service/traces.py
@@ -1,7 +1,7 @@
-from common.models import db
-from common.models.traces import Traces
-from common.utils.db_utils import build_entities
-from common.utils.format_utils import hex_str_to_bytes
+from hemera.common.models import db
+from hemera.common.models.traces import Traces
+from hemera.common.utils.db_utils import build_entities
+from hemera.common.utils.format_utils import hex_str_to_bytes
def get_traces_by_transaction_hash(transaction_hash, columns="*"):
diff --git a/api/app/db_service/transactions.py b/hemera/api/app/db_service/transactions.py
similarity index 86%
rename from api/app/db_service/transactions.py
rename to hemera/api/app/db_service/transactions.py
index 44c3aa1c8..72160780e 100644
--- a/api/app/db_service/transactions.py
+++ b/hemera/api/app/db_service/transactions.py
@@ -2,15 +2,15 @@
from sqlalchemy import and_, func, or_
-from api.app.cache import cache
-from api.app.db_service.wallet_addresses import get_txn_cnt_by_address
-from common.models import db
-from common.models.scheduled_metadata import ScheduledMetadata
-from common.models.transactions import Transactions
-from common.utils.db_utils import build_entities
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.modules.custom.address_index.models.address_transactions import AddressTransactions
-from indexer.modules.custom.stats.models.daily_transactions_stats import DailyTransactionsStats
+from hemera.api.app.cache import cache
+from hemera.api.app.db_service.wallet_addresses import get_txn_cnt_by_address
+from hemera.common.models import db
+from hemera.common.models.scheduled_metadata import ScheduledMetadata
+from hemera.common.models.transactions import Transactions
+from hemera.common.utils.db_utils import build_entities
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera_udf.address_index.models.address_transactions import AddressTransactions
+from hemera_udf.stats.models.daily_transactions_stats import DailyTransactionsStats
MAX_ADDRESS_TXN_COUNT = 100000
@@ -139,8 +139,9 @@ def get_total_txn_count():
latest_10_min_txn_cnt = Transactions.query.filter(Transactions.block_timestamp >= ten_minutes_ago).count()
avg_txn_per_minute = latest_10_min_txn_cnt / 10
+ block_date_datetime = datetime.combine(block_date, datetime.min.time())
- minutes_since_last_block = int((current_time - block_date).total_seconds() / 60)
+ minutes_since_last_block = int((current_time - block_date_datetime).total_seconds() / 60)
estimated_txn = int(avg_txn_per_minute * minutes_since_last_block)
diff --git a/api/app/db_service/wallet_addresses.py b/hemera/api/app/db_service/wallet_addresses.py
similarity index 89%
rename from api/app/db_service/wallet_addresses.py
rename to hemera/api/app/db_service/wallet_addresses.py
index db4933919..be858f56e 100644
--- a/api/app/db_service/wallet_addresses.py
+++ b/hemera/api/app/db_service/wallet_addresses.py
@@ -1,12 +1,12 @@
-from api.app.cache import cache
-from api.app.contract.contract_verify import get_contract_names
-from api.app.ens.ens import ENSClient
-from common.models import db
-from common.models.contracts import Contracts
-from common.models.tokens import Tokens
-from common.utils.config import get_config
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.address_index.models.address_index_stats import AddressIndexStats
+from hemera.api.app.cache import cache
+from hemera.api.app.contract.contract_verify import get_contract_names
+from hemera.api.app.ens.ens import ENSClient
+from hemera.common.models import db
+from hemera.common.models.contracts import Contracts
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.config import get_config
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.address_index.models.address_index_stats import AddressIndexStats
app_config = get_config()
diff --git a/api/app/ens/__init__.py b/hemera/api/app/ens/__init__.py
similarity index 100%
rename from api/app/ens/__init__.py
rename to hemera/api/app/ens/__init__.py
diff --git a/api/app/ens/ens.py b/hemera/api/app/ens/ens.py
similarity index 100%
rename from api/app/ens/ens.py
rename to hemera/api/app/ens/ens.py
diff --git a/api/app/explorer/__init__.py b/hemera/api/app/explorer/__init__.py
similarity index 100%
rename from api/app/explorer/__init__.py
rename to hemera/api/app/explorer/__init__.py
diff --git a/api/app/explorer/routes.py b/hemera/api/app/explorer/routes.py
similarity index 96%
rename from api/app/explorer/routes.py
rename to hemera/api/app/explorer/routes.py
index 01c624afd..5dd8b83d2 100644
--- a/api/app/explorer/routes.py
+++ b/hemera/api/app/explorer/routes.py
@@ -15,18 +15,28 @@
from sqlalchemy.sql import and_, func, nullslast, or_
from sqlalchemy.sql.sqltypes import Numeric
-from api.app.cache import cache
-from api.app.contract.contract_verify import get_abis_for_method, get_sha256_hash, get_similar_addresses
-from api.app.db_service.blocks import get_block_by_hash, get_block_by_number, get_blocks_by_condition, get_last_block
-from api.app.db_service.contract_internal_transactions import (
+from hemera.api.app.cache import cache
+from hemera.api.app.contract.contract_verify import (
+ get_abis_for_method,
+ get_implementation_contract,
+ get_sha256_hash,
+ get_similar_addresses,
+)
+from hemera.api.app.db_service.blocks import (
+ get_block_by_hash,
+ get_block_by_number,
+ get_blocks_by_condition,
+ get_last_block,
+)
+from hemera.api.app.db_service.contract_internal_transactions import (
get_internal_transactions_by_condition,
get_internal_transactions_by_transaction_hash,
get_internal_transactions_cnt_by_condition,
)
-from api.app.db_service.contracts import get_contract_by_address
-from api.app.db_service.daily_transactions_aggregates import get_daily_transactions_cnt
-from api.app.db_service.logs import get_logs_with_input_by_address, get_logs_with_input_by_hash
-from api.app.db_service.tokens import (
+from hemera.api.app.db_service.contracts import get_contract_by_address
+from hemera.api.app.db_service.daily_transactions_aggregates import get_daily_transactions_cnt
+from hemera.api.app.db_service.logs import get_logs_with_input_by_address, get_logs_with_input_by_hash
+from hemera.api.app.db_service.tokens import (
get_address_token_transfer_cnt,
get_raw_token_transfers,
get_token_address_token_transfer_cnt,
@@ -39,8 +49,8 @@
parse_token_transfers,
type_to_token_transfer_table,
)
-from api.app.db_service.traces import get_traces_by_condition, get_traces_by_transaction_hash
-from api.app.db_service.transactions import (
+from hemera.api.app.db_service.traces import get_traces_by_condition, get_traces_by_transaction_hash
+from hemera.api.app.db_service.transactions import (
get_address_transaction_cnt,
get_address_transaction_cnt_v2,
get_total_txn_count,
@@ -51,52 +61,52 @@
get_transactions_by_to_address,
get_transactions_cnt_by_condition,
)
-from api.app.db_service.wallet_addresses import get_address_display_mapping, get_ens_mapping
-from api.app.explorer import explorer_namespace
-from api.app.utils.fill_info import (
+from hemera.api.app.db_service.wallet_addresses import get_address_display_mapping, get_ens_mapping
+from hemera.api.app.explorer import explorer_namespace
+from hemera.api.app.utils.fill_info import (
fill_address_display_to_transactions,
fill_is_contract_to_transactions,
process_token_transfer,
)
-from api.app.utils.format_utils import format_coin_value_with_unit, format_dollar_value
-from api.app.utils.parse_utils import parse_log_with_transaction_input_list, parse_transactions
-from api.app.utils.token_utils import get_token_price
-from api.app.utils.web3_utils import get_balance, get_code, get_gas_price
-from common.models import db
-from common.models.blocks import Blocks
-from common.models.contract_internal_transactions import ContractInternalTransactions
-from common.models.contracts import Contracts
-from common.models.current_token_balances import CurrentTokenBalances
-from common.models.erc20_token_transfers import ERC20TokenTransfers
-from common.models.erc721_token_transfers import ERC721TokenTransfers
-from common.models.erc1155_token_transfers import ERC1155TokenTransfers
-from common.models.token_balances import AddressTokenBalances
-from common.models.tokens import Tokens
-from common.models.traces import Traces
-from common.models.transactions import Transactions
-from common.utils.abi_code_utils import Function, decode_function, decode_log_data
-from common.utils.config import get_config
-from common.utils.db_utils import get_total_row_count
-from common.utils.exception_control import APIError
-from common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, hex_str_to_bytes, row_to_dict
-from common.utils.web3_utils import (
+from hemera.api.app.utils.format_utils import format_coin_value_with_unit, format_dollar_value
+from hemera.api.app.utils.parse_utils import parse_log_with_transaction_input_list, parse_transactions
+from hemera.api.app.utils.token_utils import get_token_price
+from hemera.api.app.utils.web3_utils import get_balance, get_code, get_gas_price
+from hemera.common.models import db
+from hemera.common.models.blocks import Blocks
+from hemera.common.models.contract_internal_transactions import ContractInternalTransactions
+from hemera.common.models.contracts import Contracts
+from hemera.common.models.current_token_balances import CurrentTokenBalances
+from hemera.common.models.erc20_token_transfers import ERC20TokenTransfers
+from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers
+from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers
+from hemera.common.models.token_balances import AddressTokenBalances
+from hemera.common.models.tokens import Tokens
+from hemera.common.models.traces import Traces
+from hemera.common.models.transactions import Transactions
+from hemera.common.utils.abi_code_utils import Function, decode_function, decode_log_data
+from hemera.common.utils.config import get_config
+from hemera.common.utils.db_utils import get_total_row_count
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, hex_str_to_bytes, row_to_dict
+from hemera.common.utils.web3_utils import (
get_debug_trace_transaction,
is_eth_address,
is_eth_transaction_hash,
to_checksum_address,
)
-from indexer.modules.custom.address_index.models.address_index_stats import AddressIndexStats
-from indexer.modules.custom.address_index.utils.helpers import (
+from hemera_udf.address_index.models.address_index_stats import AddressIndexStats
+from hemera_udf.address_index.utils.helpers import (
get_address_erc20_token_transfer_cnt,
get_address_token_transfers,
get_address_transactions,
parse_address_token_transfers,
parse_address_transactions,
)
-from indexer.modules.custom.stats.models.daily_addresses_stats import DailyAddressesStats
-from indexer.modules.custom.stats.models.daily_blocks_stats import DailyBlocksStats
-from indexer.modules.custom.stats.models.daily_tokens_stats import DailyTokensStats
-from indexer.modules.custom.stats.models.daily_transactions_stats import DailyTransactionsStats
+from hemera_udf.stats.models.daily_addresses_stats import DailyAddressesStats
+from hemera_udf.stats.models.daily_blocks_stats import DailyBlocksStats
+from hemera_udf.stats.models.daily_tokens_stats import DailyTokensStats
+from hemera_udf.stats.models.daily_transactions_stats import DailyTransactionsStats
PAGE_SIZE = 25
MAX_TRANSACTION = 500000
@@ -1264,7 +1274,7 @@ def get(self, address):
profile_json["contract_creator"] = bytes_to_hex_str(contract.contract_creator)
profile_json["transaction_hash"] = bytes_to_hex_str(contract.transaction_hash)
profile_json["is_verified"] = contract.is_verified
- profile_json["is_proxy"] = contract.is_proxy
+ profile_json["is_proxy"] = contract.is_verified or get_implementation_contract(address) is not None
profile_json["implementation_contract"] = (
bytes_to_hex_str(contract.implementation_contract) if contract.implementation_contract else None
)
diff --git a/api/app/l2_explorer/__init__.py b/hemera/api/app/l2_explorer/__init__.py
similarity index 100%
rename from api/app/l2_explorer/__init__.py
rename to hemera/api/app/l2_explorer/__init__.py
diff --git a/api/app/l2_explorer/routes.py b/hemera/api/app/l2_explorer/routes.py
similarity index 93%
rename from api/app/l2_explorer/routes.py
rename to hemera/api/app/l2_explorer/routes.py
index bc06fd4b2..70fa4c460 100644
--- a/api/app/l2_explorer/routes.py
+++ b/hemera/api/app/l2_explorer/routes.py
@@ -1,40 +1,26 @@
import binascii
import re
-from datetime import timedelta
from operator import or_
from flask import request
from flask_restx import Resource
from sqlalchemy import and_, func
-from api.app import explorer
-from api.app.cache import cache
-from api.app.l2_explorer import l2_explorer_namespace
-from api.app.utils.utils import is_l1_block_finalized
-from common.models import db as postgres_db
-from common.models.blocks import Blocks # DailyBridgeTransactionsAggregates,
-from common.models.bridge import (
- ArbitrumStateBatches,
- ArbitrumTransactionBatches,
+from hemera.api.app.cache import cache
+from hemera.api.app.l2_explorer import l2_explorer_namespace
+from hemera.common.models import db as postgres_db
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.bridge_utils import BridgeTransactionParser
+from hemera.common.utils.config import get_config
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import format_to_dict
+from hemera.common.utils.web3_utils import is_eth_address
+from hemera_udf.bridge.models.bridge import (
BridgeTokens,
L1ToL2BridgeTransactions,
L2ToL1BridgeTransactions,
- LineaBatches,
- MantleDAStores,
- MantleDAStoreTransactionMapping,
OpBedrockStateBatches,
- OpDATransactions,
- StateBatches,
- ZkEvmBatches,
)
-from common.models.scheduled_metadata import ScheduledMetadata
-from common.models.tokens import Tokens
-from common.models.transactions import Transactions
-from common.utils.bridge_utils import BridgeTransactionParser
-from common.utils.config import get_config
-from common.utils.exception_control import APIError
-from common.utils.format_utils import as_dict, format_to_dict, format_value_for_json
-from common.utils.web3_utils import is_eth_address
app_config = get_config()
diff --git a/api/app/limiter.py b/hemera/api/app/limiter.py
similarity index 92%
rename from api/app/limiter.py
rename to hemera/api/app/limiter.py
index 3039dec49..72755f330 100644
--- a/api/app/limiter.py
+++ b/hemera/api/app/limiter.py
@@ -19,6 +19,6 @@ def get_real_ip() -> str:
# https://flask-limiter.readthedocs.io/en/stable/index.html
limiter = Limiter(
key_func=get_real_ip,
- default_limits=["1800 per hour", "180 per minute"],
+ default_limits=["36000 per hour", "180 per minute"],
storage_uri="memory://",
)
diff --git a/api/app/main.py b/hemera/api/app/main.py
similarity index 92%
rename from api/app/main.py
rename to hemera/api/app/main.py
index f5f902887..7ebcdd37e 100644
--- a/api/app/main.py
+++ b/hemera/api/app/main.py
@@ -5,11 +5,11 @@
from flask import Flask, request
from flask_cors import CORS
-from api.app.cache import cache, redis_db
-from api.app.limiter import limiter
-from common.models import db
-from common.utils.config import get_config
-from common.utils.exception_control import APIError
+from hemera.api.app.cache import cache, redis_db
+from hemera.api.app.limiter import limiter
+from hemera.common.models import db
+from hemera.common.utils.config import get_config
+from hemera.common.utils.exception_control import APIError
# from app.serializing import ma
@@ -50,7 +50,7 @@
db.init_app(app)
# Add API Namespace
-from api.app.api import api
+from hemera.api.app.api import api
api.init_app(app)
diff --git a/api/app/token/__init__.py b/hemera/api/app/token/__init__.py
similarity index 100%
rename from api/app/token/__init__.py
rename to hemera/api/app/token/__init__.py
diff --git a/api/app/user_operation/__init__.py b/hemera/api/app/user_operation/__init__.py
similarity index 100%
rename from api/app/user_operation/__init__.py
rename to hemera/api/app/user_operation/__init__.py
diff --git a/api/app/user_operation/routes.py b/hemera/api/app/user_operation/routes.py
similarity index 93%
rename from api/app/user_operation/routes.py
rename to hemera/api/app/user_operation/routes.py
index 733c673bd..a926c43ee 100644
--- a/api/app/user_operation/routes.py
+++ b/hemera/api/app/user_operation/routes.py
@@ -3,22 +3,22 @@
import flask
from flask_restx import Resource
-from api.app.cache import cache
-from api.app.user_operation import user_operation_namespace
-from api.app.utils.fill_info import fill_address_display_to_transactions, process_token_transfer
-from api.app.utils.parse_utils import parse_log_with_transaction_input_list
-from common.models import db
-from common.models.erc20_token_transfers import ERC20TokenTransfers
-from common.models.erc721_token_transfers import ERC721TokenTransfers
-from common.models.erc1155_token_transfers import ERC1155TokenTransfers
-from common.models.logs import Logs
-from common.models.tokens import Tokens
-from common.models.transactions import Transactions
-from common.utils.config import get_config
-from common.utils.db_utils import get_total_row_count
-from common.utils.exception_control import APIError
-from common.utils.format_utils import format_value_for_json, hex_str_to_bytes
-from indexer.modules.user_ops.models.user_operation_results import UserOperationResult
+from hemera.api.app.cache import cache
+from hemera.api.app.user_operation import user_operation_namespace
+from hemera.api.app.utils.fill_info import fill_address_display_to_transactions, process_token_transfer
+from hemera.api.app.utils.parse_utils import parse_log_with_transaction_input_list
+from hemera.common.models import db
+from hemera.common.models.erc20_token_transfers import ERC20TokenTransfers
+from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers
+from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers
+from hemera.common.models.logs import Logs
+from hemera.common.models.tokens import Tokens
+from hemera.common.models.transactions import Transactions
+from hemera.common.utils.config import get_config
+from hemera.common.utils.db_utils import get_total_row_count
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import format_value_for_json, hex_str_to_bytes
+from hemera_udf.user_ops.models.user_operation_results import UserOperationResult
PAGE_SIZE = 25
MAX_TRANSACTION = 500000
diff --git a/api/app/utils/__init__.py b/hemera/api/app/utils/__init__.py
similarity index 100%
rename from api/app/utils/__init__.py
rename to hemera/api/app/utils/__init__.py
diff --git a/api/app/utils/fill_info.py b/hemera/api/app/utils/fill_info.py
similarity index 93%
rename from api/app/utils/fill_info.py
rename to hemera/api/app/utils/fill_info.py
index 35e660871..f2ff4613b 100644
--- a/api/app/utils/fill_info.py
+++ b/hemera/api/app/utils/fill_info.py
@@ -1,6 +1,6 @@
-from api.app.db_service.contracts import get_contracts_by_addresses
-from api.app.db_service.wallet_addresses import get_address_display_mapping
-from common.utils.format_utils import format_to_dict, hex_str_to_bytes
+from hemera.api.app.db_service.contracts import get_contracts_by_addresses
+from hemera.api.app.db_service.wallet_addresses import get_address_display_mapping
+from hemera.common.utils.format_utils import format_to_dict, hex_str_to_bytes
def fill_address_display_to_logs(log_list, all_address_list=None):
diff --git a/api/app/utils/format_utils.py b/hemera/api/app/utils/format_utils.py
similarity index 100%
rename from api/app/utils/format_utils.py
rename to hemera/api/app/utils/format_utils.py
diff --git a/api/app/utils/parse_utils.py b/hemera/api/app/utils/parse_utils.py
similarity index 93%
rename from api/app/utils/parse_utils.py
rename to hemera/api/app/utils/parse_utils.py
index a6b5d3488..2ce4987d3 100644
--- a/api/app/utils/parse_utils.py
+++ b/hemera/api/app/utils/parse_utils.py
@@ -5,17 +5,17 @@
from flask import current_app
from web3 import Web3
-from api.app.contract.contract_verify import get_abis_for_logs, get_names_from_method_or_topic_list
-from api.app.db_service.contracts import get_contracts_by_addresses
-from api.app.db_service.tokens import get_token_by_address
-from api.app.utils.fill_info import fill_address_display_to_logs, fill_address_display_to_transactions
-from api.app.utils.format_utils import format_transaction
-from api.app.utils.token_utils import get_token_price
-from common.models.transactions import Transactions
-from common.utils.abi_code_utils import decode_log_data
-from common.utils.config import get_config
-from common.utils.format_utils import bytes_to_hex_str, format_to_dict, row_to_dict
-from common.utils.web3_utils import chain_id_name_mapping
+from hemera.api.app.contract.contract_verify import get_abis_for_logs, get_names_from_method_or_topic_list
+from hemera.api.app.db_service.contracts import get_contracts_by_addresses
+from hemera.api.app.db_service.tokens import get_token_by_address
+from hemera.api.app.utils.fill_info import fill_address_display_to_logs, fill_address_display_to_transactions
+from hemera.api.app.utils.format_utils import format_transaction
+from hemera.api.app.utils.token_utils import get_token_price
+from hemera.common.models.transactions import Transactions
+from hemera.common.utils.abi_code_utils import decode_log_data
+from hemera.common.utils.config import get_config
+from hemera.common.utils.format_utils import bytes_to_hex_str, format_to_dict, row_to_dict
+from hemera.common.utils.web3_utils import chain_id_name_mapping
app_config = get_config()
diff --git a/api/app/utils/token_utils.py b/hemera/api/app/utils/token_utils.py
similarity index 81%
rename from api/app/utils/token_utils.py
rename to hemera/api/app/utils/token_utils.py
index 8105141b8..f29d0c75e 100644
--- a/api/app/utils/token_utils.py
+++ b/hemera/api/app/utils/token_utils.py
@@ -2,11 +2,11 @@
from decimal import Decimal
from typing import List
-from api.app.cache import cache
-from common.models import db
-from common.models.token_hourly_price import CoinPrices, TokenHourlyPrices
-from common.models.token_prices import TokenPrices
-from common.utils.format_utils import as_dict
+from hemera.api.app.cache import cache
+from hemera.common.models import db
+from hemera.common.models.token_hourly_price import CoinPrices, TokenHourlyPrices
+from hemera.common.models.token_prices import TokenPrices
+from hemera.common.utils.format_utils import as_dict
@cache.memoize(300)
diff --git a/api/app/utils/utils.py b/hemera/api/app/utils/utils.py
similarity index 92%
rename from api/app/utils/utils.py
rename to hemera/api/app/utils/utils.py
index d72b56fff..6619e2508 100644
--- a/api/app/utils/utils.py
+++ b/hemera/api/app/utils/utils.py
@@ -2,8 +2,8 @@
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
-from common.utils.config import get_config
-from common.utils.db_utils import get_total_row_count
+from hemera.common.utils.config import get_config
+from hemera.common.utils.db_utils import get_total_row_count
app_config = get_config()
diff --git a/api/app/utils/web3_utils.py b/hemera/api/app/utils/web3_utils.py
similarity index 91%
rename from api/app/utils/web3_utils.py
rename to hemera/api/app/utils/web3_utils.py
index 0b58c862b..a7c244515 100644
--- a/api/app/utils/web3_utils.py
+++ b/hemera/api/app/utils/web3_utils.py
@@ -3,8 +3,8 @@
from web3 import Web3
-from api.app.cache import app_config, cache
-from common.utils.abi_code_utils import decode_data
+from hemera.api.app.cache import app_config, cache
+from hemera.common.utils.abi_code_utils import decode_data
w3 = Web3(Web3.HTTPProvider(app_config.rpc))
diff --git a/api/tests/app/test_cyber_mainnet_explorer.py b/hemera/api/tests/app/test_cyber_mainnet_explorer.py
similarity index 100%
rename from api/tests/app/test_cyber_mainnet_explorer.py
rename to hemera/api/tests/app/test_cyber_mainnet_explorer.py
diff --git a/api/tests/conftest.py b/hemera/api/tests/conftest.py
similarity index 89%
rename from api/tests/conftest.py
rename to hemera/api/tests/conftest.py
index 563663496..ab580c758 100644
--- a/api/tests/conftest.py
+++ b/hemera/api/tests/conftest.py
@@ -1,7 +1,7 @@
import pytest
-from api.app.config import *
-from common.utils.config import set_config
+from hemera.api.app.config import *
+from hemera.common.utils.config import set_config
@pytest.fixture(scope="module")
@@ -37,7 +37,7 @@ def test_client():
rpc="https://story-network.rpc.caldera.xyz/http",
)
set_config(app_config)
- from api.app.main import app
+ from hemera.api.app.main import app
with app.test_client() as testing_client:
with app.app_context():
diff --git a/hemera/cli/__init__.py b/hemera/cli/__init__.py
new file mode 100644
index 000000000..a87b1dcab
--- /dev/null
+++ b/hemera/cli/__init__.py
@@ -0,0 +1,61 @@
+import click
+
+from hemera.cli.api import api
+from hemera.cli.async_stream import async_stream
+from hemera.cli.backtest import backtest
+from hemera.cli.db import db
+from hemera.cli.init import init
+from hemera.cli.reorg import reorg
+from hemera.cli.stream import stream
+from hemera.common.utils.config import check_and_set_default_env
+from hemera.indexer.utils.logging_utils import logging_basic_config
+
+logging_basic_config()
+
+from importlib import metadata
+
+
+def get_version():
+ return metadata.version("hemera")
+
+
+def load_environ():
+ # Job control
+ check_and_set_default_env("JOB_RETRIES", "5")
+ check_and_set_default_env("PGSOURCE_ACCURACY", "false")
+
+ # Postgres commit control
+ check_and_set_default_env("COMMIT_BATCH_SIZE", "1000")
+
+ # BufferService control
+ check_and_set_default_env("BUFFER_BLOCK_SIZE", "1")
+ check_and_set_default_env("MAX_BUFFER_SIZE", "1")
+ check_and_set_default_env("ASYNC_SUBMIT", "false")
+ check_and_set_default_env("CONCURRENT_SUBMITTERS", "1")
+ check_and_set_default_env("CRASH_INSTANTLY", "true")
+ check_and_set_default_env("EXPORT_STRATEGY", "{}")
+
+ # Multicall control
+ check_and_set_default_env("GAS_LIMIT", "5000000")
+ check_and_set_default_env("BATCH_SIZE", "250")
+ check_and_set_default_env("CALLS_LIMIT", "2000")
+ check_and_set_default_env("DEFAULT_MULTICALL_ADDRESS", "0xcA11bde05977b3631167028862bE2a173976CA11")
+
+ # MetricsCollector control
+ check_and_set_default_env("METRICS_CLIENT_PORT", "9200")
+
+
+@click.group()
+@click.version_option(version=get_version())
+@click.pass_context
+def cli(ctx):
+ load_environ()
+
+
+cli.add_command(backtest, "backtest")
+cli.add_command(stream, "stream")
+cli.add_command(async_stream, "async_stream")
+cli.add_command(api, "api")
+cli.add_command(reorg, "reorg")
+cli.add_command(db, "db")
+cli.add_command(init, "init")
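The `cli` group callback above seeds process-wide defaults before any subcommand runs. `check_and_set_default_env` itself is not shown in this patch; judging from its name and usage, it presumably writes a default only when the variable is not already set, along these lines:

```python
import os


def check_and_set_default_env(key: str, default: str) -> None:
    # Assumed behavior: seed a default only when the variable is absent,
    # so values supplied by the operator's environment always win.
    if key not in os.environ:
        os.environ[key] = default


check_and_set_default_env("JOB_RETRIES", "5")   # sets it
check_and_set_default_env("JOB_RETRIES", "99")  # no-op, already set
```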
diff --git a/hemera/cli/api.py b/hemera/cli/api.py
new file mode 100644
index 000000000..d43bf2261
--- /dev/null
+++ b/hemera/cli/api.py
@@ -0,0 +1,11 @@
+import click
+
+from hemera.common.logo import print_logo
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+def api():
+ print_logo()
+ from hemera.api.app.main import app
+
+ app.run("0.0.0.0", 8082, threaded=True, debug=True, use_reloader=False)
diff --git a/hemera/cli/async_stream.py b/hemera/cli/async_stream.py
new file mode 100644
index 000000000..78d4adc1f
--- /dev/null
+++ b/hemera/cli/async_stream.py
@@ -0,0 +1,137 @@
+import os
+
+import click
+
+from hemera.cli.core.stream_process import stream_process
+from hemera.cli.options.log import log_setting
+from hemera.cli.options.performance import block_step, delay_control, multi_performance, single_performance
+from hemera.cli.options.progress import index_range, index_record
+from hemera.cli.options.rpc import rpc_provider
+from hemera.cli.options.schedule import filter_mode, job_config, job_schedule, metrics_config, reorg_switch
+from hemera.cli.options.source import source_control
+from hemera.cli.options.storage import (
+ cache_target,
+ file_size,
+ pid_file_storage,
+ postgres,
+ postgres_initial,
+ sink_target,
+)
+from hemera.indexer.utils.parameter_utils import default_if_none
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@metrics_config
+@rpc_provider
+@job_schedule
+@filter_mode
+@reorg_switch
+@job_config
+@source_control
+@sink_target
+@file_size
+@cache_target
+@postgres
+@postgres_initial
+@index_range
+@index_record
+@block_step
+@single_performance
+@multi_performance
+@delay_control
+@log_setting
+@pid_file_storage
+def async_stream(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+):
+ os.environ["JOB_RETRIES"] = "3"
+
+ # BufferService env
+ os.environ["ASYNC_SUBMIT"] = "true"
+ os.environ["CONCURRENT_SUBMITTERS"] = "5"
+ os.environ["CRASH_INSTANTLY"] = "false"
+
+ # pg performance
+ os.environ["COMMIT_BATCH_SIZE"] = "8000"
+
+ block_batch_size = default_if_none(block_batch_size, 10)
+ batch_size = default_if_none(batch_size, 50)
+ debug_batch_size = default_if_none(debug_batch_size, 1)
+ multicall = default_if_none(multicall, True)
+
+ process_numbers = default_if_none(process_numbers, 1)
+
+ period_seconds = default_if_none(period_seconds, 10)
+ delay = default_if_none(delay, 0)
+
+ retry_from_record = default_if_none(retry_from_record, True)
+
+ stream_process(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+ )
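`async_stream` tunes `BufferService` through environment variables and then applies per-command defaults with `default_if_none` (imported from `hemera.indexer.utils.parameter_utils`, not shown in this patch). A plausible sketch of that helper:

```python
def default_if_none(value, default):
    # Keep an explicitly supplied value (flag or env var); otherwise
    # fall back to the command-specific default.
    return default if value is None else value


assert default_if_none(None, 10) == 10  # nothing supplied -> default
assert default_if_none(50, 10) == 50    # flag/env value wins
```

This is why `stream`, `async_stream`, and `backtest` can share one `stream_process` core while differing only in their defaults.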
diff --git a/hemera/cli/backtest.py b/hemera/cli/backtest.py
new file mode 100644
index 000000000..90fa30959
--- /dev/null
+++ b/hemera/cli/backtest.py
@@ -0,0 +1,129 @@
+import os
+
+import click
+
+from hemera.cli.core.stream_process import stream_process
+from hemera.cli.options.log import log_setting
+from hemera.cli.options.performance import block_step, delay_control, multi_performance, single_performance
+from hemera.cli.options.progress import index_range, index_record
+from hemera.cli.options.rpc import rpc_provider
+from hemera.cli.options.schedule import filter_mode, job_config, job_schedule, metrics_config, reorg_switch
+from hemera.cli.options.source import source_control
+from hemera.cli.options.storage import (
+ cache_target,
+ file_size,
+ pid_file_storage,
+ postgres,
+ postgres_initial,
+ sink_target,
+)
+from hemera.indexer.utils.parameter_utils import default_if_none
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@metrics_config
+@rpc_provider
+@job_schedule
+@filter_mode
+@reorg_switch
+@job_config
+@source_control
+@sink_target
+@file_size
+@cache_target
+@postgres
+@postgres_initial
+@index_range
+@index_record
+@block_step
+@single_performance
+@multi_performance
+@delay_control
+@log_setting
+@pid_file_storage
+def backtest(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+):
+ os.environ["JOB_RETRIES"] = "1"
+
+ block_batch_size = default_if_none(block_batch_size, 1)
+ batch_size = default_if_none(batch_size, 1)
+ debug_batch_size = default_if_none(debug_batch_size, 1)
+ multicall = default_if_none(multicall, False)
+
+ process_numbers = default_if_none(process_numbers, 1)
+
+ period_seconds = default_if_none(period_seconds, 10)
+ delay = default_if_none(delay, 0)
+
+ retry_from_record = default_if_none(retry_from_record, False)
+
+ stream_process(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+ )
diff --git a/common/__init__.py b/hemera/cli/core/__init__.py
similarity index 100%
rename from common/__init__.py
rename to hemera/cli/core/__init__.py
diff --git a/hemera/cli/core/stream_process.py b/hemera/cli/core/stream_process.py
new file mode 100644
index 000000000..9f84d28cd
--- /dev/null
+++ b/hemera/cli/core/stream_process.py
@@ -0,0 +1,203 @@
+import logging
+import os
+import time
+
+import click
+from web3 import Web3
+
+from hemera.common.enumeration.entity_type import calculate_entity_value, generate_output_types
+from hemera.common.logo import print_logo
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.module_loading import import_submodules
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.controller.stream_controller import StreamController
+from hemera.indexer.exporters.item_exporter import create_item_exporters
+from hemera.indexer.utils.buffer_service import BufferService
+from hemera.indexer.utils.limit_reader import create_limit_reader
+from hemera.indexer.utils.logging_utils import configure_logging, configure_signals
+from hemera.indexer.utils.metrics_collector import MetricsCollector
+from hemera.indexer.utils.metrics_persistence import init_persistence
+from hemera.indexer.utils.parameter_utils import (
+ check_file_exporter_parameter,
+ check_source_load_parameter,
+ generate_dataclass_type_list_from_parameter,
+)
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.rpc_utils import pick_random_provider_uri
+from hemera.indexer.utils.sync_recorder import create_recorder
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+
+
+def calculate_execution_time(func):
+ def wrapper(*args, **kwargs):
+ start_time = time.time()
+ result = func(*args, **kwargs)
+ end_time = time.time()
+ execution_time = end_time - start_time
+ print(f"function {func.__name__} time: {execution_time:.6f} s")
+ return result
+
+ return wrapper
+
+
+@calculate_execution_time
+def stream_process(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+):
+ print_logo()
+ import_submodules("hemera_udf")
+ configure_logging(log_level, log_file)
+ configure_signals()
+ provider_uri = pick_random_provider_uri(provider_uri)
+ debug_provider_uri = pick_random_provider_uri(debug_provider_uri)
+ logging.getLogger("ROOT").info("Using provider " + provider_uri)
+ logging.getLogger("ROOT").info("Using debug provider " + debug_provider_uri)
+
+ # parameter logic checking
+ if source_path:
+ check_source_load_parameter(source_path, start_block, end_block, auto_reorg)
+ check_file_exporter_parameter(output, block_batch_size, blocks_per_file)
+
+ # build config
+ config = {
+ "blocks_per_file": blocks_per_file,
+ "source_path": source_path,
+ "chain_id": Web3(Web3.HTTPProvider(provider_uri)).eth.chain_id,
+ }
+
+ if postgres_url:
+ service = PostgreSQLService(postgres_url, db_version=db_version, init_schema=init_schema)
+ config["db_service"] = service
+ else:
+ logging.getLogger("ROOT").warning("No postgres url provided. Exception recorder will not be useful.")
+
+ if config_file:
+ file_based_config = {}
+ if not os.path.exists(config_file):
+ raise click.ClickException(f"Config file {config_file} not found")
+ with open(config_file, "r") as f:
+ if config_file.endswith(".json"):
+ import json
+
+ file_based_config = json.load(f)
+ elif config_file.endswith(".yaml") or config_file.endswith(".yml"):
+ import yaml
+
+ file_based_config = yaml.safe_load(f)
+ else:
+ raise click.ClickException(f"Config file {config_file} is not supported")
+
+ if file_based_config.get("chain_id") != config["chain_id"]:
+ raise click.ClickException(
+ f"Config file {config_file} is not compatible with chain_id {config['chain_id']}"
+ )
+ else:
+ logging.getLogger("ROOT").info(f"Loading config from file: {config_file}, chain_id: {config['chain_id']}")
+ config.update(file_based_config)
+ output_types_by_entity_type = []
+ if entity_types is not None:
+ entity_types = calculate_entity_value(entity_types)
+ output_types_by_entity_type = list(set(generate_output_types(entity_types)))
+
+ output_types = list(
+ set(generate_dataclass_type_list_from_parameter(output_types, "output") + output_types_by_entity_type)
+ )
+
+ if source_path and source_path.startswith("postgresql://"):
+ source_types = generate_dataclass_type_list_from_parameter(source_types, "source")
+
+ metrics = MetricsCollector(
+ instance_name=instance_name,
+ persistence=init_persistence(instance_name=instance_name, persistence_type=persistence_type, config=config),
+ )
+
+ sync_recorder = create_recorder(sync_recorder, config, multi_mode=process_numbers > 1)
+ buffer_service = BufferService(
+ item_exporters=create_item_exporters(output, config),
+ required_output_types=[output.type() for output in output_types],
+ success_callback=sync_recorder.handle_success,
+ exception_callback=sync_recorder.set_failure_record,
+ metrics=metrics,
+ )
+
+ job_scheduler = JobScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(debug_provider_uri, batch=True)),
+ buffer_service=buffer_service,
+ batch_size=batch_size,
+ debug_batch_size=debug_batch_size,
+ max_workers=max_workers,
+ config=config,
+ required_output_types=output_types,
+ required_source_types=source_types,
+ cache=cache,
+ auto_reorg=auto_reorg,
+ multicall=multicall,
+ force_filter_mode=force_filter_mode,
+ metrics=metrics,
+ )
+
+ if process_numbers is None:
+ process_numbers = 1
+ if process_size is None:
+ process_size = int(block_batch_size / process_numbers)
+ if process_time_out is None:
+ process_time_out = 300 * process_size
+
+ controller = StreamController(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=False)),
+ job_scheduler=job_scheduler,
+ sync_recorder=sync_recorder,
+ limit_reader=create_limit_reader(
+ source_path, ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=False))
+ ),
+ retry_from_record=retry_from_record,
+ delay=delay,
+ process_numbers=process_numbers,
+ process_size=process_size,
+ process_time_out=process_time_out,
+ metrics=metrics,
+ )
+
+ controller.action(
+ start_block=start_block,
+ end_block=end_block,
+ block_batch_size=block_batch_size,
+ period_seconds=period_seconds,
+ pid_file=pid_file,
+ )
+
+ buffer_service.shutdown()
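`stream_process` wraps every provider factory in a `ThreadLocalProxy` so each worker thread lazily builds its own RPC provider. The wrapper is not part of this patch; a minimal sketch of the pattern it implements:

```python
import threading


class ThreadLocalProxy:
    # Sketch of the wrapper's contract: each thread lazily builds and
    # caches its own delegate via the factory, so provider instances are
    # never shared across worker threads.
    def __init__(self, factory):
        self._factory = factory
        self._local = threading.local()

    def __getattr__(self, name):
        if not hasattr(self._local, "obj"):
            self._local.obj = self._factory()
        return getattr(self._local.obj, name)
```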
diff --git a/hemera/cli/db.py b/hemera/cli/db.py
new file mode 100644
index 000000000..d39da3705
--- /dev/null
+++ b/hemera/cli/db.py
@@ -0,0 +1,108 @@
+import logging
+from typing import List
+
+import click
+from sqlalchemy import text
+
+from hemera.cli.options.storage import postgres, postgres_initial
+from hemera.common.logo import print_logo
+from hemera.common.models import HemeraModel
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.module_loading import import_submodules
+
+logger = logging.getLogger("DB Client")
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@postgres
+@postgres_initial
+@click.option(
+ "-c",
+ "--create-tables",
+ type=str,
+ default=None,
+ required=False,
+ help="Table names that need to be created in the database. e.g. blocks,transactions",
+)
+@click.option(
+ "-d",
+ "--drop-tables",
+ type=str,
+ default=None,
+ required=False,
+ help="Table names that need to be dropped in the database. e.g. blocks,transactions",
+)
+@click.option(
+ "-t",
+ "--truncate-tables",
+ type=str,
+ default=None,
+ required=False,
+ help="Table names that need to clean up data. e.g. blocks,transactions",
+)
+def db(postgres_url, init_schema, db_version, create_tables=None, drop_tables=None, truncate_tables=None):
+ print_logo()
+ service = PostgreSQLService(jdbc_url=postgres_url, db_version=db_version, init_schema=init_schema)
+
+ if create_tables or drop_tables or truncate_tables:
+ import_submodules("hemera_udf")
+ exist_models = {
+ table.__tablename__: table
+ for table in HemeraModel.get_all_hemera_model_dict()
+ if hasattr(table, "__tablename__")
+ }
+
+ if create_tables:
+ tables = create_tables.split(",")
+ if len(tables) > 0:
+ create(service, tables, exist_models)
+
+ logger.info("Table creation has been finished.")
+
+ if drop_tables:
+ tables = drop_tables.split(",")
+ if len(tables) > 0:
+ drop(service, tables, exist_models)
+
+ logger.info("Table deletion has been finished.")
+
+ if truncate_tables:
+ tables = truncate_tables.split(",")
+ if len(tables) > 0:
+ truncate(service, tables, exist_models)
+
+ logger.info("Table cleanup has been finished.")
+
+ logger.info("db operation finished, now exit.")
+
+
+def create(service: PostgreSQLService, tables: List[str], exist_models: dict):
+ engine = service.get_service_engine()
+ for table in tables:
+ if table in exist_models:
+ exist_models[table].__table__.create(engine, checkfirst=True)
+ logger.info(f"Table {table} created successfully.")
+ else:
+ logger.warning(f"No Table {table} model definition was found.")
+
+
+def drop(service: PostgreSQLService, tables: List[str], exist_models: dict):
+ engine = service.get_service_engine()
+ for table in tables:
+ if table in exist_models:
+ exist_models[table].__table__.drop(engine, checkfirst=True)
+ logger.info(f"Table {table} dropped successfully.")
+ else:
+ logger.warning(f"No Table {table} model definition was found.")
+
+
+def truncate(service: PostgreSQLService, tables: List[str], exist_models: dict):
+ session = service.get_service_session()
+ for table in tables:
+ if table in exist_models:
+ session.execute(text(f"TRUNCATE TABLE {table}"))
+ session.commit()
+ logger.info(f"Table {table} truncated successfully.")
+ else:
+ logger.warning(f"No Table {table} model definition was found.")
+ session.close()
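Since `db` is an ordinary `click` command, it can also be driven programmatically, e.g. from a test, with `click.testing.CliRunner`. The connection URL below is a placeholder and the call needs a reachable database to succeed:

```python
from click.testing import CliRunner

from hemera.cli.db import db

runner = CliRunner()
result = runner.invoke(
    db,
    # Placeholder URL; adjust to your local database.
    ["-pg", "postgresql+psycopg2://postgres:admin@127.0.0.1:5432/ethereum",
     "-c", "blocks,transactions"],
)
print(result.exit_code, result.output)
```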
diff --git a/hemera/cli/init.py b/hemera/cli/init.py
new file mode 100644
index 000000000..9646d7f4f
--- /dev/null
+++ b/hemera/cli/init.py
@@ -0,0 +1,112 @@
+import logging
+import os.path
+
+import click
+
+from hemera.cli.options.schedule import metrics_config
+from hemera.cli.options.storage import postgres, postgres_initial
+from hemera.common.logo import print_logo
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.file_utils import get_project_root
+from hemera.common.utils.format_utils import to_camel_case, to_space_camel_case
+from hemera.indexer.utils.metrics_persistence import BasePersistence, init_persistence
+from hemera.indexer.utils.template_generator import TemplateGenerator
+
+logger = logging.getLogger("Init Client")
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@metrics_config
+@click.option(
+ "--jobs",
+ type=str,
+ default=None,
+ required=False,
+ help="The custom job's name that needs to initialized. "
+ "If you need to initialize multiple jobs at the same time, please separate by ','"
+ "e.g. ens,deposit_to_l2",
+)
+@click.option(
+ "--db",
+ is_flag=True,
+ required=False,
+ help="The --db flag triggers the database initialization process. ",
+)
+@click.option(
+ "--metrics",
+ is_flag=True,
+ required=False,
+ help="The --metrics flag triggers the metrics' persistence data initialization process. ",
+)
+@postgres
+@postgres_initial
+def init(jobs, db, metrics, postgres_url, db_version, init_schema, instance_name, persistence_type):
+ print_logo()
+ if db:
+ init_schema = True
+ PostgreSQLService(jdbc_url=postgres_url, db_version=db_version, init_schema=init_schema)
+ logger.info("Database successfully initialized.")
+
+ if jobs:
+ jobs = [job.lower() for job in jobs.split(",")]
+ jobs_space_initialize_before_check(jobs)
+
+ if metrics:
+ config = {}
+ if postgres_url:
+ service = PostgreSQLService(jdbc_url=postgres_url)
+ config["db_service"] = service
+ persistence = init_persistence(instance_name=instance_name, persistence_type=persistence_type, config=config)
+ persistence.init()
+ logger.info(f"The instance: {instance_name}'s metrics has been successfully initialized.")
+
+
+def jobs_space_initialize_before_check(jobs):
+ project_root = get_project_root()
+ custom_jobs_path = os.path.join(project_root, "hemera_udf")
+ exists_job = os.listdir(custom_jobs_path)
+
+ empty_generator = TemplateGenerator()
+ init_generator = TemplateGenerator(
+ template_file=os.path.join(project_root, "hemera/resource/template/custom_init.example")
+ )
+ job_generator = TemplateGenerator(
+ template_file=os.path.join(project_root, "hemera/resource/template/export_custom_job.example")
+ )
+ domain_generator = TemplateGenerator(
+ template_file=os.path.join(project_root, "hemera/resource/template/custom_domain.example")
+ )
+ model_generator = TemplateGenerator(
+ template_file=os.path.join(project_root, "hemera/resource/template/custom_module.example")
+ )
+ namespace_generator = TemplateGenerator(
+ template_file=os.path.join(project_root, "hemera/resource/template/custom_api_namespace.example")
+ )
+
+ for job in jobs:
+ if job in exists_job:
+ logger.error(f"In the folder './hemera_udf/', custom job named {job} already exists.")
+ continue
+
+ custom_job_path = os.path.join(custom_jobs_path, job)
+
+ init_generator.add_replacements(key="${job_name}", value=job)
+ init_generator.add_replacements(key="${entity_name}", value=job.upper())
+ init_generator.generate_file(target_path=os.path.join(custom_job_path, "__init__.py"))
+
+ job_generator.add_replacements(key="${job}", value=job)
+ job_generator.add_replacements(key="${job_name}", value=to_camel_case(job))
+ job_generator.generate_file(target_path=os.path.join(custom_job_path, f"export_{job}_job.py"))
+
+ domain_generator.generate_file(target_path=os.path.join(custom_job_path, "domains.py"))
+
+ model_generator.add_replacements(key="${job}", value=job)
+ model_generator.generate_file(target_path=os.path.join(custom_job_path, "models", f"{job}_module.py"))
+ empty_generator.generate_file(target_path=os.path.join(custom_job_path, "models", "__init__.py"))
+
+ namespace_generator.add_replacements(key="${job}", value=job)
+ namespace_generator.add_replacements(key="${job_descript}", value=to_space_camel_case(job))
+ namespace_generator.generate_file(target_path=os.path.join(custom_job_path, "endpoint", "__init__.py"))
+ empty_generator.generate_file(target_path=os.path.join(custom_job_path, "endpoint", "routes.py"))
+
+ logger.info(f"{job} successfully initialized.")
diff --git a/common/converter/__init__.py b/hemera/cli/options/__init__.py
similarity index 100%
rename from common/converter/__init__.py
rename to hemera/cli/options/__init__.py
diff --git a/hemera/cli/options/log.py b/hemera/cli/options/log.py
new file mode 100644
index 000000000..4407e08f2
--- /dev/null
+++ b/hemera/cli/options/log.py
@@ -0,0 +1,23 @@
+import click
+
+
+def log_setting(options):
+ options = click.option(
+ "--log-file",
+ default=None,
+ show_default=True,
+ type=str,
+ envvar="LOG_FILE",
+ help="Log file",
+ )(options)
+
+ options = click.option(
+ "--log-level",
+ default="INFO",
+ show_default=True,
+ type=str,
+ envvar="LOG_LEVEL",
+ help="Set the logging output level.",
+ )(options)
+
+ return options
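Each module under `hemera/cli/options/` follows the same pattern: a function that takes the command callback and applies a group of `click.option` decorators to it, so a single `@log_setting` line reuses the whole group across commands. A minimal standalone illustration:

```python
import click


def common_options(command):
    # Apply several click options in one go; the group is then reusable
    # as a single decorator on any command.
    command = click.option("--verbose", is_flag=True)(command)
    command = click.option("--name", default="world", envvar="NAME")(command)
    return command


@click.command()
@common_options
def greet(verbose, name):
    click.echo(f"Hello, {name}!" + (" (verbose)" if verbose else ""))


if __name__ == "__main__":
    greet()
```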
diff --git a/hemera/cli/options/performance.py b/hemera/cli/options/performance.py
new file mode 100644
index 000000000..52541613f
--- /dev/null
+++ b/hemera/cli/options/performance.py
@@ -0,0 +1,105 @@
+import click
+
+
+def delay_control(options):
+ options = click.option(
+ "--period-seconds",
+ show_default=True,
+ type=float,
+ envvar="PERIOD_SECONDS",
+ help="How many seconds to sleep between syncs",
+ )(options)
+
+ options = click.option(
+ "--delay",
+ show_default=True,
+ type=int,
+ envvar="DELAY",
+ help="The limit number of blocks which delays from the network current block number.",
+ )(options)
+
+ return options
+
+
+def block_step(options):
+ options = click.option(
+ "-B",
+ "--block-batch-size",
+ show_default=True,
+ type=int,
+ envvar="BLOCK_BATCH_SIZE",
+ help="How many blocks to batch in single sync round",
+ )(options)
+
+ return options
+
+
+def single_performance(options):
+ options = click.option(
+ "-b",
+ "--batch-size",
+ show_default=True,
+ type=int,
+ envvar="BATCH_SIZE",
+ help="The number of non-debug RPC requests to batch in a single request",
+ )(options)
+
+ options = click.option(
+ "--debug-batch-size",
+ show_default=True,
+ type=int,
+ envvar="DEBUG_BATCH_SIZE",
+ help="The number of debug RPC requests to batch in a single request",
+ )(options)
+
+ options = click.option(
+ "-w",
+ "--max-workers",
+ default=5,
+ show_default=True,
+ type=int,
+ help="The number of workers during a request to rpc.",
+ envvar="MAX_WORKERS",
+ )(options)
+
+ options = click.option(
+ "-m",
+ "--multicall",
+ show_default=True,
+ type=bool,
+ help="if `multicall` is set to True, it will decrease the consume of rpc calls",
+ envvar="MULTI_CALL_ENABLE",
+ )(options)
+
+ return options
+
+
+def multi_performance(options):
+ options = click.option(
+ "-pn",
+ "--process-numbers",
+ show_default=True,
+ type=int,
+ help="The processor numbers to ues.",
+ envvar="PROCESS_NUMBERS",
+ )(options)
+
+ options = click.option(
+ "-ps",
+ "--process-size",
+ show_default=True,
+ type=int,
+ help="The data size for every process to handle. Default to {B}/{pn} ,see above",
+ envvar="PROCESS_SIZE",
+ )(options)
+
+ options = click.option(
+ "-pto",
+ "--process-time-out",
+ show_default=True,
+ type=int,
+ help="Timeout for every processor, default to {ps} * 300 , see above",
+ envvar="PROCESS_TIME_OUT",
+ )(options)
+
+ return options
diff --git a/hemera/cli/options/progress.py b/hemera/cli/options/progress.py
new file mode 100644
index 000000000..3b1ce6382
--- /dev/null
+++ b/hemera/cli/options/progress.py
@@ -0,0 +1,46 @@
+import click
+
+
+def index_range(options):
+ options = click.option(
+ "-s",
+ "--start-block",
+ show_default=True,
+ type=int,
+ help="Start block",
+ envvar="START_BLOCK",
+ )(options)
+
+ options = click.option(
+ "-e",
+ "--end-block",
+ show_default=True,
+ type=int,
+ help="End block",
+ envvar="END_BLOCK",
+ )(options)
+ return options
+
+
+def index_record(options):
+ options = click.option(
+ "--sync-recorder",
+ default="file:sync_record",
+ show_default=True,
+ type=str,
+ envvar="SYNC_RECORDER",
+ help="How to store the sync record data."
+ 'e.g pg:base. means sync record data will store in pg as "base" be key'
+ 'or file:base. means sync record data will store in file as "base" be file name',
+ )(options)
+
+ options = click.option(
+ "--retry-from-record",
+ show_default=True,
+ type=bool,
+ envvar="RETRY_FROM_RECORD",
+ help="With the default parameter, the program will always run from the -s parameter, "
+ "and when set to True, it will run from the record point between -s and -e",
+ )(options)
+
+ return options
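The `--sync-recorder` value packs backend and key into a `scheme:name` string. The actual parsing happens in `create_recorder` (not in this patch); a parser consistent with the help text could be as simple as the following, where `parse_recorder_spec` is a purely illustrative name:

```python
def parse_recorder_spec(spec: str):
    # "pg:base"   -> record stored in postgres under key "base"
    # "file:base" -> record stored in a file named "base"
    scheme, _, name = spec.partition(":")
    if scheme not in ("pg", "file"):
        raise ValueError(f"unsupported sync recorder scheme: {scheme!r}")
    return scheme, name


print(parse_recorder_spec("file:sync_record"))  # ('file', 'sync_record')
```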
diff --git a/hemera/cli/options/rpc.py b/hemera/cli/options/rpc.py
new file mode 100644
index 000000000..db78954ab
--- /dev/null
+++ b/hemera/cli/options/rpc.py
@@ -0,0 +1,27 @@
+import click
+
+
+def rpc_provider(options):
+ options = click.option(
+ "-p",
+ "--provider-uri",
+ default="https://ethereum-rpc.publicnode.com",
+ show_default=True,
+ type=str,
+ envvar="PROVIDER_URI",
+ help="The URI of the web3 provider e.g. "
+ "file://$HOME/Library/Ethereum/geth.ipc or https://ethereum-rpc.publicnode.com",
+ )(options)
+
+ options = click.option(
+ "-d",
+ "--debug-provider-uri",
+ default="https://ethereum-rpc.publicnode.com",
+ show_default=True,
+ type=str,
+ envvar="DEBUG_PROVIDER_URI",
+ help="The URI of the web3 debug provider e.g. "
+ "file://$HOME/Library/Ethereum/geth.ipc or https://ethereum-rpc.publicnode.com",
+ )(options)
+
+ return options
diff --git a/hemera/cli/options/schedule.py b/hemera/cli/options/schedule.py
new file mode 100644
index 000000000..2a164573b
--- /dev/null
+++ b/hemera/cli/options/schedule.py
@@ -0,0 +1,92 @@
+import click
+
+from hemera.common.enumeration.entity_type import DEFAULT_COLLECTION
+
+
+def metrics_config(options):
+ options = click.option(
+ "--instance-name",
+ default="default",
+ show_default=True,
+ type=str,
+ envvar="INSTANCE_NAME",
+ help="The instance name displayed in the monitoring background.",
+ )(options)
+
+ options = click.option(
+ "--persistence-type",
+ default="file",
+ show_default=True,
+ type=str,
+ envvar="PERSISTENCE_TYPE",
+ help="How to persist metrics data."
+ "e.g postgres means persist metrics data by postgres and use instance name be key"
+ "or file means persist metrics data by file and use instance name be file name",
+ )(options)
+
+ return options
+
+
+def job_schedule(options):
+ options = click.option(
+ "-E",
+ "--entity-types",
+ default=",".join(DEFAULT_COLLECTION),
+ show_default=True,
+ type=str,
+ envvar="ENTITY_TYPES",
+ help="The list of entity types to export. " "e.g. EXPLORER_BASE | EXPLORER_TOKEN | EXPLORER_TRACE",
+ )(options)
+
+ options = click.option(
+ "-O",
+ "--output-types",
+ show_default=True,
+ type=str,
+ envvar="OUTPUT_TYPES",
+ help="The list of output types to export, corresponding to more detailed data models. "
+ "Specifying this option will prioritize these settings over the entity types specified in -E. "
+ "Examples include: block, transaction, log, "
+ "token, address_token_balance, erc20_token_transfer, erc721_token_transfer, erc1155_token_transfer, "
+ "trace, contract, coin_balance.",
+ )(options)
+
+ return options
+
+
+def job_config(options):
+ options = click.option(
+ "--config-file",
+ show_default=True,
+ type=str,
+ envvar="CONFIG_FILE",
+ help="The path to the configuration file, if provided, the configuration file will be used to load the configuration. Supported formats are json and yaml.",
+ )(options)
+
+ return options
+
+
+def filter_mode(options):
+ options = click.option(
+ "--force-filter-mode",
+ default=False,
+ show_default=True,
+ type=bool,
+ envvar="FORCE_FILTER_MODE",
+ help="Force the filter mode to be enabled, even if no filters job are provided.",
+ )(options)
+
+ return options
+
+
+def reorg_switch(options):
+ options = click.option(
+ "--auto-reorg",
+ default=False,
+ show_default=True,
+ type=bool,
+ envvar="AUTO_REORG",
+ help="Whether to detect reorg in data streams and automatically repair data.",
+ )(options)
+
+ return options
diff --git a/hemera/cli/options/source.py b/hemera/cli/options/source.py
new file mode 100644
index 000000000..dd5105225
--- /dev/null
+++ b/hemera/cli/options/source.py
@@ -0,0 +1,29 @@
+import click
+
+
+def source_control(options):
+ options = click.option(
+ "--source-path",
+ show_default=True,
+ required=False,
+ type=str,
+ envvar="SOURCE_PATH",
+ help="The path to load the data."
+ "Load from postgres e.g. postgresql://postgres:admin@127.0.0.1:5432/ethereum"
+ "or local csv file e.g. csvfile://your-file-direction; "
+ "or local json file e.g. jsonfile://your-file-direction; ",
+ )(options)
+
+ options = click.option(
+ "--source-types",
+ default="block,transaction,log",
+ show_default=True,
+ type=str,
+ envvar="SOURCE_TYPES",
+ help="The list of types to read from source, corresponding to more detailed data models. "
+ "Examples include: block, transaction, log, "
+ "token, address_token_balance, erc20_token_transfer, erc721_token_transfer, erc1155_token_transfer, "
+ "trace, contract, coin_balance.",
+ )(options)
+
+ return options
diff --git a/hemera/cli/options/storage.py b/hemera/cli/options/storage.py
new file mode 100644
index 000000000..f7a1154ee
--- /dev/null
+++ b/hemera/cli/options/storage.py
@@ -0,0 +1,99 @@
+import click
+
+
+def cache_target(options):
+ options = click.option(
+ "--cache",
+ default="memory",
+ show_default=True,
+ type=str,
+ envvar="CACHE_SERVICE",
+ help="How to store the cache data."
+ "e.g redis. means cache data will store in redis, redis://localhost:6379"
+ "or memory. means cache data will store in memory, memory",
+ )(options)
+
+ return options
+
+
+def sink_target(options):
+ options = click.option(
+ "-o",
+ "--output",
+ type=str,
+ envvar="OUTPUT",
+ help="The output selection."
+ "Print to console e.g. console; "
+ "or postgresql e.g. postgres"
+ "or local json file e.g. jsonfile://your-file-path; "
+ "or local csv file e.g. csvfile://your-file-path; "
+ "or both. e.g. console,jsonfile://your-file-path,csvfile://your-file-path",
+ )(options)
+
+ return options
+
+
+def file_size(options):
+ options = click.option(
+ "--blocks-per-file",
+ default=1000,
+ show_default=True,
+ type=int,
+ envvar="BLOCKS_PER_FILE",
+ help="How many blocks data was written to each file",
+ )(options)
+
+ return options
+
+
+def postgres(options):
+ options = click.option(
+ "-pg",
+ "--postgres-url",
+ type=str,
+ required=False,
+ envvar="POSTGRES_URL",
+ help="The required postgres connection url."
+ "e.g. postgresql+psycopg2://postgres:admin@127.0.0.1:5432/ethereum",
+ )(options)
+
+ return options
+
+
+def postgres_initial(options):
+ options = click.option(
+ "-v",
+ "--db-version",
+ default="head",
+ show_default=True,
+ type=str,
+ envvar="DB_VERSION",
+ help="The database version to initialize the database. using the alembic script's revision ID to "
+ "specify a version. "
+ "e.g. head, indicates the latest version."
+ "or base, indicates the empty database without any table.",
+ )(options)
+
+ options = click.option(
+ "-i",
+ "--init-schema",
+ is_flag=True,
+ required=False,
+ show_default=True,
+ envvar="INIT_SCHEMA",
+ help="Whether to automatically run database migration scripts to update the database to the specify version.",
+ )(options)
+
+ return options
+
+
+def pid_file_storage(options):
+ options = click.option(
+ "--pid-file",
+ show_default=True,
+ type=str,
+ envvar="PID_FILE",
+ help="Pid file",
+ )(options)
+
+ return options
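Every option here declares an `envvar`, so `click` resolves each value as: explicit flag first, then the environment variable, then the declared default. A small demonstration of that precedence:

```python
import click
from click.testing import CliRunner


@click.command()
@click.option("--cache", default="memory", envvar="CACHE_SERVICE")
def show(cache):
    click.echo(cache)


runner = CliRunner()
env = {"CACHE_SERVICE": "redis://localhost:6379"}
print(runner.invoke(show, [], env=env).output)                     # env beats default
print(runner.invoke(show, ["--cache", "memory"], env=env).output)  # flag beats env
```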
diff --git a/hemera/cli/reorg.py b/hemera/cli/reorg.py
new file mode 100644
index 000000000..72472285d
--- /dev/null
+++ b/hemera/cli/reorg.py
@@ -0,0 +1,159 @@
+import logging
+import os
+
+import click
+
+from hemera.cli.options.log import log_setting
+from hemera.cli.options.performance import single_performance
+from hemera.cli.options.rpc import rpc_provider
+from hemera.cli.options.schedule import job_config
+from hemera.cli.options.storage import cache_target, postgres
+from hemera.common.enumeration.entity_type import EntityType, generate_output_types
+from hemera.common.logo import print_logo
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.module_loading import import_submodules
+from hemera.indexer.controller.reorg_controller import ReorgController
+from hemera.indexer.controller.scheduler.reorg_scheduler import ReorgScheduler
+from hemera.indexer.exporters.postgres_item_exporter import PostgresItemExporter
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.logging_utils import configure_logging, configure_signals
+from hemera.indexer.utils.parameter_utils import default_if_none
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.reorg import check_reorg
+from hemera.indexer.utils.rpc_utils import pick_random_provider_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+
+exception_recorder = ExceptionRecorder()
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@rpc_provider
+@job_config
+@postgres
+@single_performance
+@cache_target
+@log_setting
+@click.option(
+ "--block-number",
+ show_default=True,
+ type=int,
+ envvar="BLOCK_NUMBER",
+ help="Specify the block number to reorging.",
+)
+@click.option(
+ "-r",
+ "--ranges",
+ default=10,
+ show_default=True,
+ type=int,
+ envvar="RANGES",
+ help="Specify the range limit for data fixing.",
+)
+@click.option(
+ "--check-ranges",
+ show_default=True,
+ type=int,
+ envvar="CHECK_RANGES",
+ help="Specify the range for block continuous checking.",
+)
+def reorg(
+ provider_uri,
+ debug_provider_uri,
+ config_file,
+ postgres_url,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ cache,
+ log_file,
+ log_level,
+ block_number,
+ ranges,
+ check_ranges,
+):
+ batch_size = default_if_none(batch_size, 1)
+ debug_batch_size = default_if_none(debug_batch_size, 1)
+ multicall = default_if_none(multicall, True)
+
+ print_logo()
+ import_submodules("hemera_udf")
+ configure_logging(log_level=log_level, log_file=log_file)
+ configure_signals()
+
+ provider_uri = pick_random_provider_uri(provider_uri)
+ debug_provider_uri = pick_random_provider_uri(debug_provider_uri)
+ logging.info("Using provider " + provider_uri)
+ logging.info("Using debug provider " + debug_provider_uri)
+
+ # build postgresql service
+ if postgres_url:
+ service = PostgreSQLService(postgres_url)
+ config = {"db_service": service}
+ exception_recorder.init_pg_service(service)
+ else:
+ logging.error("No postgres url provided. Exception recorder will not be useful.")
+ exit(1)
+
+ if config_file:
+ if not os.path.exists(config_file):
+ raise click.ClickException(f"Config file {config_file} not found")
+ with open(config_file, "r") as f:
+ if config_file.endswith(".json"):
+ import json
+
+ config.update(json.load(f))
+ elif config_file.endswith(".yaml") or config_file.endswith(".yml"):
+ import yaml
+
+ config.update(yaml.safe_load(f))
+ else:
+ raise click.ClickException(f"Config file {config_file} is not supported")
+
+ entity_types = EntityType.combine_all_entity_types()
+ output_types = list(generate_output_types(entity_types))
+
+ job_scheduler = ReorgScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(debug_provider_uri, batch=True)),
+ item_exporters=PostgresItemExporter(postgres_url=postgres_url),
+ batch_size=batch_size,
+ debug_batch_size=debug_batch_size,
+ max_workers=max_workers,
+ required_output_types=output_types,
+ config=config,
+ cache=cache,
+ multicall=multicall,
+ )
+
+ controller = ReorgController(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(provider_uri, batch=False)),
+ job_scheduler=job_scheduler,
+ ranges=ranges,
+ service=service,
+ )
+
+ if not block_number:
+ current_block = controller.get_current_block_number()
+ if check_ranges:
+ check_begin = current_block - check_ranges
+ check_reorg(service, check_begin)
+ else:
+ check_reorg(service)
+
+ while True:
+ if block_number:
+ controller.action(block_number=block_number)
+ else:
+ job = controller.wake_up_next_job()
+ if job:
+ logging.info(f"Waking up uncompleted job: {job.job_id}.")
+
+ controller.action(
+ job_id=job.job_id,
+ block_number=job.last_fixed_block_number - 1,
+ remains=job.remain_process,
+ )
+ else:
+ logging.info("No more uncompleted jobs to wake-up, reorg process will terminate.")
+ break
diff --git a/hemera/cli/stream.py b/hemera/cli/stream.py
new file mode 100644
index 000000000..e557d4bf9
--- /dev/null
+++ b/hemera/cli/stream.py
@@ -0,0 +1,125 @@
+import click
+
+from hemera.cli.core.stream_process import stream_process
+from hemera.cli.options.log import log_setting
+from hemera.cli.options.performance import block_step, delay_control, multi_performance, single_performance
+from hemera.cli.options.progress import index_range, index_record
+from hemera.cli.options.rpc import rpc_provider
+from hemera.cli.options.schedule import filter_mode, job_config, job_schedule, metrics_config, reorg_switch
+from hemera.cli.options.source import source_control
+from hemera.cli.options.storage import (
+ cache_target,
+ file_size,
+ pid_file_storage,
+ postgres,
+ postgres_initial,
+ sink_target,
+)
+from hemera.indexer.utils.parameter_utils import default_if_none
+
+
+@click.command(context_settings=dict(help_option_names=["-h", "--help"]))
+@metrics_config
+@rpc_provider
+@job_schedule
+@filter_mode
+@reorg_switch
+@job_config
+@source_control
+@sink_target
+@file_size
+@cache_target
+@postgres
+@postgres_initial
+@index_range
+@index_record
+@block_step
+@single_performance
+@multi_performance
+@delay_control
+@log_setting
+@pid_file_storage
+def stream(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+):
+ block_batch_size = default_if_none(block_batch_size, 1)
+ batch_size = default_if_none(batch_size, 1)
+ debug_batch_size = default_if_none(debug_batch_size, 1)
+ multicall = default_if_none(multicall, False)
+
+ process_numbers = default_if_none(process_numbers, 1)
+
+ period_seconds = default_if_none(period_seconds, 10)
+ delay = default_if_none(delay, 0)
+
+ retry_from_record = default_if_none(retry_from_record, True)
+
+ stream_process(
+ instance_name,
+ provider_uri,
+ debug_provider_uri,
+ entity_types,
+ output_types,
+ force_filter_mode,
+ auto_reorg,
+ config_file,
+ source_path,
+ source_types,
+ output,
+ blocks_per_file,
+ cache,
+ postgres_url,
+ db_version,
+ init_schema,
+ start_block,
+ end_block,
+ sync_recorder,
+ retry_from_record,
+ persistence_type,
+ block_batch_size,
+ batch_size,
+ debug_batch_size,
+ max_workers,
+ multicall,
+ process_numbers,
+ process_size,
+ process_time_out,
+ period_seconds,
+ delay,
+ log_file,
+ log_level,
+ pid_file,
+ )
diff --git a/common/services/__init__.py b/hemera/common/__init__.py
similarity index 100%
rename from common/services/__init__.py
rename to hemera/common/__init__.py
diff --git a/common/utils/__init__.py b/hemera/common/converter/__init__.py
similarity index 100%
rename from common/utils/__init__.py
rename to hemera/common/converter/__init__.py
diff --git a/common/converter/pg_converter.py b/hemera/common/converter/pg_converter.py
similarity index 59%
rename from common/converter/pg_converter.py
rename to hemera/common/converter/pg_converter.py
index 4550cbb95..6fccf3017 100644
--- a/common/converter/pg_converter.py
+++ b/hemera/common/converter/pg_converter.py
@@ -1,15 +1,11 @@
-from common.models import HemeraModel, model_path_patterns
-from common.utils.module_loading import import_string, scan_subclass_by_path_patterns
+from hemera.common.models import *
def scan_convert_config():
- class_mapping = scan_subclass_by_path_patterns(model_path_patterns, HemeraModel)
+ class_mapping = HemeraModel.get_all_subclasses()
config_mapping = {}
- for class_name, path in class_mapping.items():
- full_class_path = path["cls_import_path"]
- module = import_string(full_class_path)
-
+ for _, module in class_mapping.items():
module_configs = module.model_domain_mapping()
if module_configs:
for config in module_configs:
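`scan_convert_config` now discovers models through `HemeraModel.get_all_subclasses()` rather than scanning `model_path_patterns` on disk. The registry idea it relies on, reduced to its essentials and independent of SQLAlchemy:

```python
class RegistryMeta(type):
    # Every concrete subclass registers itself at class-creation time, so
    # "find all models" becomes a dict lookup instead of a filesystem scan.
    registry = {}

    def __new__(mcs, name, bases, attrs):
        cls = super().__new__(mcs, name, bases, attrs)
        if bases:  # skip the abstract base itself
            mcs.registry[name] = cls
        return cls


class Model(metaclass=RegistryMeta):
    pass


class Blocks(Model):
    pass


print(RegistryMeta.registry)  # {'Blocks': <class '__main__.Blocks'>}
```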
diff --git a/enumeration/__init__.py b/hemera/common/enumeration/__init__.py
similarity index 100%
rename from enumeration/__init__.py
rename to hemera/common/enumeration/__init__.py
diff --git a/hemera/common/enumeration/entity_type.py b/hemera/common/enumeration/entity_type.py
new file mode 100644
index 000000000..86de00c5f
--- /dev/null
+++ b/hemera/common/enumeration/entity_type.py
@@ -0,0 +1,193 @@
+from enum import IntFlag
+from functools import reduce
+from typing import Dict, Generator, Set, Type
+
+from hemera.indexer.domains.block import Block, UpdateBlockInternalCount
+from hemera.indexer.domains.block_ts_mapper import BlockTsMapper
+from hemera.indexer.domains.contract import Contract
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token import MarkBalanceToken, Token, UpdateToken
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.domains.token_id_infos import (
+ ERC721TokenIdChange,
+ ERC721TokenIdDetail,
+ ERC1155TokenIdDetail,
+ UpdateERC721TokenIdDetail,
+ UpdateERC1155TokenIdDetail,
+)
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.domains.trace import Trace
+from hemera.indexer.domains.transaction import Transaction
+
+
+class DynamicEntityTypeRegistry:
+ """Registry for managing entity type registrations, output mappings and dynamic types."""
+
+ _next_bit = 14  # Start past the predefined bits in EntityType, leaving room for future static flags
+ _dynamic_types: Dict[str, int] = {}
+ _output_types: Dict[int, Set[Type]] = {}
+
+ @classmethod
+ def register(cls, name: str) -> int:
+ """Register a new entity type and return its bit value."""
+ if name in cls._dynamic_types:
+ return cls._dynamic_types[name]
+
+ if hasattr(EntityType, name):
+ return getattr(EntityType, name)
+
+ bit_value = 1 << cls._next_bit
+ cls._dynamic_types[name] = bit_value
+ cls._next_bit += 1
+ return bit_value
+
+ @classmethod
+ def register_output_types(cls, entity_type: int, output_types: Set[Type]) -> None:
+ """Register output types for a specific entity type flag."""
+ cls._output_types[entity_type] = output_types
+
+ @classmethod
+ def get_value(cls, name: str) -> int:
+ """Get the bit value for a registered type."""
+ return cls._dynamic_types.get(name)
+
+ @classmethod
+ def is_registered(cls, name: str) -> bool:
+ """Check if a type is registered."""
+ return name in cls._dynamic_types
+
+ @classmethod
+ def get_all_types(cls):
+ """Get all registered types including both static and dynamic."""
+ static_types = {name: value for name, value in EntityType.__members__.items()}
+ return {**static_types, **cls._dynamic_types}
+
+ @classmethod
+ def get_output_types(cls, entity_types: int) -> Generator[Type, None, None]:
+ """Get all output types for given entity types, removing duplicates."""
+ seen_types = set()
+ # Check static output types first
+ for bit_value, types in StaticOutputTypes._output_types.items():
+ if entity_types & bit_value:
+ for type_class in types:
+ if type_class not in seen_types:
+ seen_types.add(type_class)
+ yield type_class
+
+ # Then check dynamic output types
+ for bit_value, types in cls._output_types.items():
+ if entity_types & bit_value:
+ for type_class in types:
+ if type_class not in seen_types:
+ seen_types.add(type_class)
+ yield type_class
+
+
+class StaticOutputTypes:
+ """Manages output types for static EntityType members."""
+
+ _output_types: Dict[int, Set[Type]] = {}
+
+ @classmethod
+ def register_output_types(cls, entity_type: int, output_types: Set[Type]) -> None:
+ """Register output types for a static entity type."""
+ cls._output_types[entity_type] = output_types
+
+
+class EntityType(IntFlag):
+ """
+ Entity types using bit flags with both static and dynamic types.
+ Static types are defined here, dynamic types are managed by DynamicEntityTypeRegistry.
+ """
+
+ # Core package
+ EXPLORER_BASE = 1 << 0
+ EXPLORER_TOKEN = 1 << 1
+ EXPLORER_TRACE = 1 << 2
+
+ # Composite type
+ EXPLORER = EXPLORER_BASE | EXPLORER_TOKEN | EXPLORER_TRACE
+
+ @staticmethod
+ def combine_all_entity_types():
+ """Combine all entity types using bitwise OR."""
+ all_values = list(EntityType.__members__.values())
+ all_values.extend(DynamicEntityTypeRegistry._dynamic_types.values())
+ return reduce(lambda x, y: x | y, all_values)
+
+ @staticmethod
+ def entity_filter_mode(entity_types):
+ """Check if entity types match bridge mode exactly."""
+ # BRIDGE is not a static member of this enum; resolving it through the
+ # dynamic registry (an assumption) avoids an AttributeError here.
+ bridge = DynamicEntityTypeRegistry.get_value("BRIDGE")
+ return bridge is not None and entity_types ^ bridge == 0
+
+
+DEFAULT_COLLECTION = []
+
+
+def register_all_output_types():
+ """Register output types for all entity types (both static and dynamic)."""
+ # Register static output types
+ StaticOutputTypes.register_output_types(EntityType.EXPLORER_BASE, {Block, BlockTsMapper, Transaction, Log})
+
+ StaticOutputTypes.register_output_types(
+ EntityType.EXPLORER_TOKEN,
+ {
+ Token,
+ UpdateToken,
+ ERC20TokenTransfer,
+ ERC721TokenTransfer,
+ ERC1155TokenTransfer,
+ TokenBalance,
+ CurrentTokenBalance,
+ MarkBalanceToken,
+ UpdateERC1155TokenIdDetail,
+ ERC1155TokenIdDetail,
+ UpdateERC721TokenIdDetail,
+ ERC721TokenIdDetail,
+ ERC721TokenIdChange,
+ },
+ )
+
+ StaticOutputTypes.register_output_types(
+ EntityType.EXPLORER_TRACE,
+ {
+ Trace,
+ Contract,
+ ContractInternalTransaction,
+ UpdateBlockInternalCount,
+ # CoinBalance
+ },
+ )
+
+
+register_all_output_types()
+
+
+def calculate_entity_value(entity_types: str) -> int:
+ """Convert entity type strings to combined bit value."""
+ if entity_types is None or entity_types == "":
+ return 0
+
+ entities = EntityType(0)
+ for entity_type in [entity.strip().upper() for entity in entity_types.split(",")]:
+ if entity_type in EntityType.__members__:
+ entities |= EntityType[entity_type]
+ elif DynamicEntityTypeRegistry.is_registered(entity_type):
+ entities |= DynamicEntityTypeRegistry.get_value(entity_type)
+ else:
+ all_types = list(EntityType.__members__.keys())
+ all_types.extend(DynamicEntityTypeRegistry._dynamic_types.keys())
+ available_types = ",".join(all_types)
+ raise ValueError(
+ f"{entity_type} is not an available entity type. Supply a comma-separated list of types from {available_types}"
+ )
+ return entities
+
+
+def generate_output_types(entity_types: int) -> Generator[Type, None, None]:
+ """Generate output types for both static and dynamic entity types."""
+ yield from DynamicEntityTypeRegistry.get_output_types(entity_types)
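To see the registry in action: a UDF package can mint a new entity flag, attach its output dataclasses, and have `-E` resolve the name exactly like a built-in type. `BridgeDeposit` below is a placeholder domain class, not part of this patch:

```python
from hemera.common.enumeration.entity_type import (
    DynamicEntityTypeRegistry,
    calculate_entity_value,
    generate_output_types,
)


class BridgeDeposit:  # placeholder domain class for illustration
    pass


bridge = DynamicEntityTypeRegistry.register("BRIDGE")
DynamicEntityTypeRegistry.register_output_types(bridge, {BridgeDeposit})

entities = calculate_entity_value("EXPLORER_BASE,BRIDGE")
print(sorted(t.__name__ for t in generate_output_types(entities)))
# ['Block', 'BlockTsMapper', 'BridgeDeposit', 'Log', 'Transaction']
```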
diff --git a/enumeration/record_level.py b/hemera/common/enumeration/record_level.py
similarity index 100%
rename from enumeration/record_level.py
rename to hemera/common/enumeration/record_level.py
diff --git a/enumeration/schedule_mode.py b/hemera/common/enumeration/schedule_mode.py
similarity index 100%
rename from enumeration/schedule_mode.py
rename to hemera/common/enumeration/schedule_mode.py
diff --git a/enumeration/token_type.py b/hemera/common/enumeration/token_type.py
similarity index 100%
rename from enumeration/token_type.py
rename to hemera/common/enumeration/token_type.py
diff --git a/cli/logo.py b/hemera/common/logo.py
similarity index 100%
rename from cli/logo.py
rename to hemera/common/logo.py
diff --git a/common/models/__init__.py b/hemera/common/models/__init__.py
similarity index 54%
rename from common/models/__init__.py
rename to hemera/common/models/__init__.py
index 6e28d3161..c9a6354ac 100644
--- a/common/models/__init__.py
+++ b/hemera/common/models/__init__.py
@@ -1,5 +1,6 @@
+from dataclasses import fields
from datetime import datetime, timezone
-from typing import Type
+from typing import Any, Dict, Type
from flask_sqlalchemy import SQLAlchemy
from psycopg2._json import Json
@@ -7,24 +8,45 @@
from sqlalchemy import Numeric as SQL_Numeric
from sqlalchemy.dialects.postgresql import ARRAY, BYTEA, JSON, JSONB, NUMERIC, TIMESTAMP
-from common.utils.format_utils import hex_str_to_bytes
-from common.utils.module_loading import import_string, scan_subclass_by_path_patterns
-from indexer.domain import Domain
-
-model_path_patterns = [
- "common/models",
- "indexer/modules/*/models",
- "indexer/modules/custom/*/models",
- "indexer/aggr_jobs/*/models",
-]
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.common.utils.module_loading import import_string, import_submodules
+from hemera.indexer.domains import Domain
model_path_exclude = []
# db = RouteSQLAlchemy(session_options={"autoflush": False})
db = SQLAlchemy(session_options={"autoflush": False})
+from sqlalchemy import BigInteger, Boolean, DateTime, Integer, LargeBinary, Numeric
+
+
+class HemeraMeta(type(db.Model)):
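+ # Registry of all concrete HemeraModel subclasses, keyed by class name and
+ # populated automatically in __new__ as each model class is created.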
+ _registry = {}
+
+ def __new__(mcs, name, bases, attrs):
+ new_cls = super().__new__(mcs, name, bases, attrs)
+
+ if name != "HemeraModel" and issubclass(new_cls, HemeraModel):
+ mcs._registry[name] = new_cls
+
+ return new_cls
+
+ @classmethod
+ def get_all_subclasses(mcs):
+ import_submodules("hemera.common.models")
+
+ def get_subclasses(cls):
+ subclasses = set()
+ for subclass in cls.__subclasses__():
+ subclasses.add(subclass)
+ subclasses.update(get_subclasses(subclass))
+ return subclasses
+
+ all_subclasses = get_subclasses(HemeraModel)
+ return {subclass: subclass for subclass in all_subclasses}
-class HemeraModel(db.Model):
+
+class HemeraModel(db.Model, metaclass=HemeraMeta):
__abstract__ = True
__query_order__ = []
@@ -37,6 +59,26 @@ def model_domain_mapping():
def schema(self):
return "public"
+ @classmethod
+ def get_all_annotation_keys(cls):
+ keys = set()
+ for clz in cls.__mro__:
+ if "__annotations__" in clz.__dict__:
+ keys.update(clz.__annotations__.keys())
+
+ return keys
+
+ @classmethod
+ def get_all_hemera_model_dict(cls):
+ return HemeraMeta.get_all_subclasses()
+
+ def dict_to_entity(self, data_dict: Dict[str, Any]):
+ valid_keys = {field.name for field in fields(self.__class__)}
+ filtered_data = {k: v for k, v in data_dict.items() if k in valid_keys}
+
+ for key, value in filtered_data.items():
+ setattr(self, key, value)
+
def get_column_type(table: Type[HemeraModel], column_name):
return table.__table__.c[column_name].type
@@ -47,14 +89,16 @@ def general_converter(table: Type[HemeraModel], data: Domain, is_update=False):
for key in data.__dict__.keys():
if key in table.__table__.c:
column_type = get_column_type(table, key)
- if isinstance(column_type, BYTEA) and not isinstance(getattr(data, key), bytes):
+ if (isinstance(column_type, BYTEA) or isinstance(column_type, LargeBinary)) and not isinstance(
+ getattr(data, key), bytes
+ ):
if isinstance(getattr(data, key), str):
converted_data[key] = hex_str_to_bytes(getattr(data, key)) if getattr(data, key) else None
elif isinstance(getattr(data, key), int):
converted_data[key] = getattr(data, key).to_bytes(32, byteorder="big")
else:
converted_data[key] = None
- elif isinstance(column_type, TIMESTAMP):
+ elif isinstance(column_type, TIMESTAMP) or isinstance(column_type, DateTime):
converted_data[key] = datetime.utcfromtimestamp(getattr(data, key))
elif isinstance(column_type, ARRAY) and isinstance(column_type.item_type, BYTEA):
converted_data[key] = [hex_str_to_bytes(address) for address in getattr(data, key)]
@@ -64,6 +108,7 @@ def general_converter(table: Type[HemeraModel], data: Domain, is_update=False):
isinstance(column_type, NUMERIC)
or isinstance(column_type, SQL_NUMERIC)
or isinstance(column_type, SQL_Numeric)
+ or isinstance(column_type, Numeric)
) and isinstance(getattr(data, key), str):
converted_data[key] = None
else:
@@ -79,9 +124,10 @@ def general_converter(table: Type[HemeraModel], data: Domain, is_update=False):
def import_all_models():
- for name in __models_imports:
+ hemera_model_subclass = HemeraModel.get_all_subclasses()
+ for name in hemera_model_subclass.keys():
if name != "ImportError":
- path = __models_imports.get(name)
+ path = hemera_model_subclass.get(name)
if not path:
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
@@ -90,11 +136,3 @@ def import_all_models():
# Store for next time
globals()[name] = val
return val
-
-
-__models_imports = {
- k: v["module_import_path"]
- for k, v in scan_subclass_by_path_patterns(
- model_path_patterns, HemeraModel, exclude_path=model_path_exclude
- ).items()
-}
diff --git a/common/models/block_timestamp_mapper.py b/hemera/common/models/block_timestamp_mapper.py
similarity index 83%
rename from common/models/block_timestamp_mapper.py
rename to hemera/common/models/block_timestamp_mapper.py
index 9f0f56873..1974eaf58 100644
--- a/common/models/block_timestamp_mapper.py
+++ b/hemera/common/models/block_timestamp_mapper.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc
from sqlalchemy.dialects.postgresql import BIGINT, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.block_ts_mapper import BlockTsMapper
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.block_ts_mapper import BlockTsMapper
class BlockTimestampMapper(HemeraModel):
diff --git a/common/models/blocks.py b/hemera/common/models/blocks.py
similarity index 94%
rename from common/models/blocks.py
rename to hemera/common/models/blocks.py
index f5be00104..1fa753233 100644
--- a/common/models/blocks.py
+++ b/hemera/common/models/blocks.py
@@ -3,8 +3,8 @@
from sqlalchemy import Column, Index, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.block import Block, UpdateBlockInternalCount
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.block import Block, UpdateBlockInternalCount
class Blocks(HemeraModel):
diff --git a/common/models/coin_balances.py b/hemera/common/models/coin_balances.py
similarity index 89%
rename from common/models/coin_balances.py
rename to hemera/common/models/coin_balances.py
index 32eaa8d8b..80fba580a 100644
--- a/common/models/coin_balances.py
+++ b/hemera/common/models/coin_balances.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.coin_balance import CoinBalance
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.coin_balance import CoinBalance
class CoinBalances(HemeraModel):
diff --git a/common/models/contract_internal_transactions.py b/hemera/common/models/contract_internal_transactions.py
similarity index 93%
rename from common/models/contract_internal_transactions.py
rename to hemera/common/models/contract_internal_transactions.py
index d7026b10d..8199c38ed 100644
--- a/common/models/contract_internal_transactions.py
+++ b/hemera/common/models/contract_internal_transactions.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func, text
from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
class ContractInternalTransactions(HemeraModel):
diff --git a/common/models/contracts.py b/hemera/common/models/contracts.py
similarity index 80%
rename from common/models/contracts.py
rename to hemera/common/models/contracts.py
index 674ec30c5..622119756 100644
--- a/common/models/contracts.py
+++ b/hemera/common/models/contracts.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Computed, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, JSONB, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.domain.contract import Contract
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.contract import Contract, ContractFromTransaction
class Contracts(HemeraModel):
@@ -48,5 +48,11 @@ def model_domain_mapping():
"conflict_do_update": False,
"update_strategy": None,
"converter": general_converter,
- }
+ },
+ {
+ "domain": ContractFromTransaction,
+ "conflict_do_update": False,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
]
diff --git a/common/models/current_token_balances.py b/hemera/common/models/current_token_balances.py
similarity index 88%
rename from common/models/current_token_balances.py
rename to hemera/common/models/current_token_balances.py
index c1c5a8b25..61ddc5b27 100644
--- a/common/models/current_token_balances.py
+++ b/hemera/common/models/current_token_balances.py
@@ -1,9 +1,9 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
-from common.models.token_balances import token_balances_general_converter
-from indexer.domain.current_token_balance import CurrentTokenBalance
+from hemera.common.models import HemeraModel
+from hemera.common.models.token_balances import token_balances_general_converter
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
class CurrentTokenBalances(HemeraModel):
diff --git a/common/models/erc1155_token_id_details.py b/hemera/common/models/erc1155_token_id_details.py
similarity index 86%
rename from common/models/erc1155_token_id_details.py
rename to hemera/common/models/erc1155_token_id_details.py
index f6ed9ff8f..978b486f8 100644
--- a/common/models/erc1155_token_id_details.py
+++ b/hemera/common/models/erc1155_token_id_details.py
@@ -1,9 +1,9 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, JSONB, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from common.models.erc721_token_id_details import token_uri_format_converter
-from indexer.domain.token_id_infos import ERC1155TokenIdDetail, UpdateERC1155TokenIdDetail
+from hemera.common.models import HemeraModel, general_converter
+from hemera.common.models.erc721_token_id_details import token_uri_format_converter
+from hemera.indexer.domains.token_id_infos import ERC1155TokenIdDetail, UpdateERC1155TokenIdDetail
class ERC1155TokenIdDetails(HemeraModel):
diff --git a/common/models/erc1155_token_transfers.py b/hemera/common/models/erc1155_token_transfers.py
similarity index 94%
rename from common/models/erc1155_token_transfers.py
rename to hemera/common/models/erc1155_token_transfers.py
index 741d9345c..f725ed51d 100644
--- a/common/models/erc1155_token_transfers.py
+++ b/hemera/common/models/erc1155_token_transfers.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.token_transfer import ERC1155TokenTransfer
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_transfer import ERC1155TokenTransfer
class ERC1155TokenTransfers(HemeraModel):
diff --git a/common/models/erc20_token_transfers.py b/hemera/common/models/erc20_token_transfers.py
similarity index 94%
rename from common/models/erc20_token_transfers.py
rename to hemera/common/models/erc20_token_transfers.py
index 946e7fc1e..b2dcaf9ef 100644
--- a/common/models/erc20_token_transfers.py
+++ b/hemera/common/models/erc20_token_transfers.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.token_transfer import ERC20TokenTransfer
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
class ERC20TokenTransfers(HemeraModel):
diff --git a/common/models/erc721_token_id_changes.py b/hemera/common/models/erc721_token_id_changes.py
similarity index 90%
rename from common/models/erc721_token_id_changes.py
rename to hemera/common/models/erc721_token_id_changes.py
index 10145abcc..f8dc0ab92 100644
--- a/common/models/erc721_token_id_changes.py
+++ b/hemera/common/models/erc721_token_id_changes.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.token_id_infos import ERC721TokenIdChange
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_id_infos import ERC721TokenIdChange
class ERC721TokenIdChanges(HemeraModel):
diff --git a/common/models/erc721_token_id_details.py b/hemera/common/models/erc721_token_id_details.py
similarity index 91%
rename from common/models/erc721_token_id_details.py
rename to hemera/common/models/erc721_token_id_details.py
index 7197fc0d8..d3e44a16f 100644
--- a/common/models/erc721_token_id_details.py
+++ b/hemera/common/models/erc721_token_id_details.py
@@ -4,8 +4,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, JSONB, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.domain.token_id_infos import ERC721TokenIdDetail, UpdateERC721TokenIdDetail
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_id_infos import ERC721TokenIdDetail, UpdateERC721TokenIdDetail
def token_uri_format_converter(table: Type[HemeraModel], data, is_update=False):
diff --git a/common/models/erc721_token_transfers.py b/hemera/common/models/erc721_token_transfers.py
similarity index 94%
rename from common/models/erc721_token_transfers.py
rename to hemera/common/models/erc721_token_transfers.py
index fbb471c4c..6b810ff24 100644
--- a/common/models/erc721_token_transfers.py
+++ b/hemera/common/models/erc721_token_transfers.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.token_transfer import ERC721TokenTransfer
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_transfer import ERC721TokenTransfer
class ERC721TokenTransfers(HemeraModel):
diff --git a/common/models/exception_records.py b/hemera/common/models/exception_records.py
similarity index 92%
rename from common/models/exception_records.py
rename to hemera/common/models/exception_records.py
index 9a32268b0..8ac2b2a8e 100644
--- a/common/models/exception_records.py
+++ b/hemera/common/models/exception_records.py
@@ -3,7 +3,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, JSONB, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class ExceptionRecords(HemeraModel):
diff --git a/hemera/common/models/failure_records.py b/hemera/common/models/failure_records.py
new file mode 100644
index 000000000..4c64497f2
--- /dev/null
+++ b/hemera/common/models/failure_records.py
@@ -0,0 +1,16 @@
+from sqlalchemy import Column
+from sqlalchemy.dialects.postgresql import BIGINT, JSON, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel
+
+
+class FailureRecords(HemeraModel):
+ __tablename__ = "failure_records"
+ record_id = Column(BIGINT, primary_key=True, autoincrement=True)
+ mission_sign = Column(VARCHAR)
+ output_types = Column(VARCHAR)
+ start_block_number = Column(BIGINT)
+ end_block_number = Column(BIGINT)
+ exception_stage = Column(VARCHAR)
+ exception = Column(JSON)
+ crash_time = Column(TIMESTAMP)
diff --git a/common/models/fix_record.py b/hemera/common/models/fix_record.py
similarity index 91%
rename from common/models/fix_record.py
rename to hemera/common/models/fix_record.py
index 33f704bbd..585dabfd2 100644
--- a/common/models/fix_record.py
+++ b/hemera/common/models/fix_record.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BIGINT, INTEGER, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class FixRecord(HemeraModel):
diff --git a/common/models/logs.py b/hemera/common/models/logs.py
similarity index 94%
rename from common/models/logs.py
rename to hemera/common/models/logs.py
index 94cad27ff..17b475de7 100644
--- a/common/models/logs.py
+++ b/hemera/common/models/logs.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.domain.log import Log
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.log import Log
class Logs(HemeraModel):
diff --git a/hemera/common/models/metrics_persistence.py b/hemera/common/models/metrics_persistence.py
new file mode 100644
index 000000000..2f591bf95
--- /dev/null
+++ b/hemera/common/models/metrics_persistence.py
@@ -0,0 +1,14 @@
+from sqlalchemy import Column, func
+from sqlalchemy.dialects.postgresql import JSON, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel
+
+
+class MetricsPersistence(HemeraModel):
+ __tablename__ = "metrics_persistence"
+
+ instance = Column(VARCHAR, primary_key=True)
+ metrics = Column(JSON)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
diff --git a/hemera/common/models/nft_transfers.py b/hemera/common/models/nft_transfers.py
new file mode 100644
index 000000000..9b8f5bb5a
--- /dev/null
+++ b/hemera/common/models/nft_transfers.py
@@ -0,0 +1,101 @@
+from sqlalchemy import (
+ BigInteger,
+ Boolean,
+ Column,
+ DateTime,
+ Index,
+ Integer,
+ LargeBinary,
+ Numeric,
+ PrimaryKeyConstraint,
+ desc,
+ func,
+ text,
+)
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_transfer import ERC721TokenTransfer, ERC1155TokenTransfer
+
+
+class NftTransfers(HemeraModel):
+ """
+ Model for tracking NFT (ERC721/ERC1155) transfer events.
+ """
+
+ __tablename__ = "nft_transfers"
+
+ # Primary columns
+ transaction_hash = Column(LargeBinary, nullable=False)
+ block_hash = Column(LargeBinary, nullable=False)
+ log_index = Column(Integer, nullable=False)
+ token_id = Column(Numeric(100), nullable=False)
+
+ # Transfer info
+ from_address = Column(LargeBinary)
+ to_address = Column(LargeBinary)
+ token_address = Column(LargeBinary)
+ value = Column(Numeric(100), nullable=True)
+
+ # Block info
+ block_number = Column(BigInteger)
+ block_timestamp = Column(DateTime)
+
+ # Metadata columns
+ create_time = Column(DateTime, server_default=func.now(), nullable=False)
+ update_time = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False)
+ reorg = Column(Boolean, server_default=text("false"), nullable=False)
+
+ # Table constraints
+ __table_args__ = (
+ PrimaryKeyConstraint(
+ "transaction_hash",
+ "block_timestamp",
+ "block_number",
+ "log_index",
+ "block_hash",
+ "token_id",
+ name="pk_nft_transfers",
+ ),
+ # Block-based indices
+ Index("idx_nft_transfers_block_log", desc(block_timestamp), desc(block_number), desc(log_index)),
+ # Address-based indices with time
+ Index(
+ "idx_nft_transfers_token_time",
+ token_address,
+ desc(block_timestamp),
+ desc(block_number),
+ desc(log_index),
+ ),
+ # Token-specific indices
+ Index(
+ "idx_nft_transfers_token_id",
+ token_address,
+ token_id,
+ desc(block_timestamp),
+ desc(block_number),
+ desc(log_index),
+ ),
+ )
+
+ # Query order specification
+ __query_order__ = [block_timestamp, block_number, log_index]
+
+ @staticmethod
+ def model_domain_mapping():
+ """
+ Define the domain model mapping configuration.
+ """
+ return [
+ {
+ "domain": ERC1155TokenTransfer,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": ERC721TokenTransfer,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
diff --git a/common/models/period_wallet_addresses_aggregates.py b/hemera/common/models/period_wallet_addresses_aggregates.py
similarity index 98%
rename from common/models/period_wallet_addresses_aggregates.py
rename to hemera/common/models/period_wallet_addresses_aggregates.py
index 504b3c9f2..9a5d8eb66 100644
--- a/common/models/period_wallet_addresses_aggregates.py
+++ b/hemera/common/models/period_wallet_addresses_aggregates.py
@@ -1,7 +1,7 @@
from sqlalchemy import DATE, Column, Computed
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class PeriodWalletAddressesAggregates(HemeraModel):
diff --git a/common/models/scheduled_metadata.py b/hemera/common/models/scheduled_metadata.py
similarity index 89%
rename from common/models/scheduled_metadata.py
rename to hemera/common/models/scheduled_metadata.py
index 174c229c0..719b5277f 100644
--- a/common/models/scheduled_metadata.py
+++ b/hemera/common/models/scheduled_metadata.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, DateTime
from sqlalchemy.dialects.postgresql import INTEGER, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class ScheduledMetadata(HemeraModel):
diff --git a/common/models/sync_record.py b/hemera/common/models/sync_record.py
similarity index 86%
rename from common/models/sync_record.py
rename to hemera/common/models/sync_record.py
index 57301aa1d..24b0bbcca 100644
--- a/common/models/sync_record.py
+++ b/hemera/common/models/sync_record.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class SyncRecord(HemeraModel):
diff --git a/common/models/token_balances.py b/hemera/common/models/token_balances.py
similarity index 86%
rename from common/models/token_balances.py
rename to hemera/common/models/token_balances.py
index ff1b8e80b..4aecc294f 100644
--- a/common/models/token_balances.py
+++ b/hemera/common/models/token_balances.py
@@ -3,8 +3,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.domain.token_balance import TokenBalance
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token_balance import TokenBalance
def token_balances_general_converter(table: Type[HemeraModel], data: TokenBalance, is_update=False):
@@ -25,13 +25,13 @@ class AddressTokenBalances(HemeraModel):
balance = Column(NUMERIC(100))
block_number = Column(BIGINT, primary_key=True)
- block_timestamp = Column(TIMESTAMP)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
reorg = Column(BOOLEAN, server_default=text("false"))
- __table_args__ = (PrimaryKeyConstraint("address", "token_address", "token_id", "block_number"),)
+ __table_args__ = (PrimaryKeyConstraint("address", "token_address", "token_id", "block_number", "block_timestamp"),)
@staticmethod
def model_domain_mapping():
diff --git a/common/models/token_hourly_price.py b/hemera/common/models/token_hourly_price.py
similarity index 88%
rename from common/models/token_hourly_price.py
rename to hemera/common/models/token_hourly_price.py
index ea5bc511d..115bd02e9 100644
--- a/common/models/token_hourly_price.py
+++ b/hemera/common/models/token_hourly_price.py
@@ -1,6 +1,6 @@
from sqlalchemy import Column, DateTime, Numeric, String
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class TokenHourlyPrices(HemeraModel):
diff --git a/common/models/token_prices.py b/hemera/common/models/token_prices.py
similarity index 82%
rename from common/models/token_prices.py
rename to hemera/common/models/token_prices.py
index 2b3fe38eb..d4c7acfbf 100644
--- a/common/models/token_prices.py
+++ b/hemera/common/models/token_prices.py
@@ -1,6 +1,6 @@
from sqlalchemy import Column, DateTime, Numeric, String
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class TokenPrices(HemeraModel):
diff --git a/common/models/tokens.py b/hemera/common/models/tokens.py
similarity index 70%
rename from common/models/tokens.py
rename to hemera/common/models/tokens.py
index 3ad01dab4..3ad019d3e 100644
--- a/common/models/tokens.py
+++ b/hemera/common/models/tokens.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, JSONB, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.domain.token import Token, UpdateToken
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.token import MarkBalanceToken, MarkTotalSupplyToken, Token, UpdateToken
class Tokens(HemeraModel):
@@ -32,6 +32,12 @@ class Tokens(HemeraModel):
gecko_id = Column(VARCHAR)
description = Column(VARCHAR)
+ no_balance_of = Column(BOOLEAN, default=False)
+ fail_balance_of_count = Column(INTEGER, default=0)
+ succeed_balance_of_count = Column(INTEGER, default=0)
+ no_total_supply = Column(BOOLEAN, default=False)
+ fail_total_supply_count = Column(INTEGER, default=0)
+
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
@@ -52,6 +58,20 @@ def model_domain_mapping():
"update_strategy": "EXCLUDED.block_number > tokens.block_number",
"converter": general_converter,
},
+ {
+ "domain": MarkTotalSupplyToken,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ # "update_strategy": "EXCLUDED.block_number >= tokens.block_number",
+ "converter": general_converter,
+ },
+ {
+ "domain": MarkBalanceToken,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ # "update_strategy": "EXCLUDED.block_number >= tokens.block_number",
+ "converter": general_converter,
+ },
]
diff --git a/common/models/traces.py b/hemera/common/models/traces.py
similarity index 93%
rename from common/models/traces.py
rename to hemera/common/models/traces.py
index fd10a86e3..90c268912 100644
--- a/common/models/traces.py
+++ b/hemera/common/models/traces.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func, text
from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.domain.trace import Trace
+from hemera.common.models import HemeraModel, general_converter
+from hemera.indexer.domains.trace import Trace
class Traces(HemeraModel):
diff --git a/common/models/transactions.py b/hemera/common/models/transactions.py
similarity index 95%
rename from common/models/transactions.py
rename to hemera/common/models/transactions.py
index 3d14af846..b78a788cd 100644
--- a/common/models/transactions.py
+++ b/hemera/common/models/transactions.py
@@ -3,9 +3,9 @@
from sqlalchemy import Column, Computed, Index, asc, desc, func, text
from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.domain.transaction import Transaction
+from hemera.common.models import HemeraModel, general_converter
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.indexer.domains.transaction import Transaction
class Transactions(HemeraModel):
diff --git a/indexer/__init__.py b/hemera/common/services/__init__.py
similarity index 100%
rename from indexer/__init__.py
rename to hemera/common/services/__init__.py
diff --git a/common/services/hemera_postgresql_service.py b/hemera/common/services/hemera_postgresql_service.py
similarity index 100%
rename from common/services/hemera_postgresql_service.py
rename to hemera/common/services/hemera_postgresql_service.py
diff --git a/common/services/postgresql_service.py b/hemera/common/services/postgresql_service.py
similarity index 99%
rename from common/services/postgresql_service.py
rename to hemera/common/services/postgresql_service.py
index c6b7a9d51..0b402ac33 100644
--- a/common/services/postgresql_service.py
+++ b/hemera/common/services/postgresql_service.py
@@ -45,7 +45,7 @@ def __init__(
pool_recycle: int = 1800, # 30 minutes
application_name: str = "postgresql_service",
db_version: str = "head",
- script_location: str = "migrations",
+ script_location: str = "hemera/migrations",
init_schema: bool = False,
):
"""
diff --git a/common/services/sqlalchemy_session.py b/hemera/common/services/sqlalchemy_session.py
similarity index 100%
rename from common/services/sqlalchemy_session.py
rename to hemera/common/services/sqlalchemy_session.py
diff --git a/indexer/aggr_jobs/__init__.py b/hemera/common/utils/__init__.py
similarity index 100%
rename from indexer/aggr_jobs/__init__.py
rename to hemera/common/utils/__init__.py
diff --git a/common/utils/abi_code_utils.py b/hemera/common/utils/abi_code_utils.py
similarity index 94%
rename from common/utils/abi_code_utils.py
rename to hemera/common/utils/abi_code_utils.py
index a1e9166db..db697dc4a 100644
--- a/common/utils/abi_code_utils.py
+++ b/hemera/common/utils/abi_code_utils.py
@@ -6,13 +6,13 @@
Project : hemera_indexer
"""
import logging
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union, cast
+from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import eth_abi
from ens.utils import get_abi_output_types
from eth_abi import abi
from eth_abi.codec import ABICodec
-from eth_typing import HexStr, TypeStr
+from eth_typing import TypeStr
from eth_utils import encode_hex, to_hex
from hexbytes import HexBytes
from web3._utils.abi import (
@@ -25,17 +25,20 @@
from web3._utils.normalizers import BASE_RETURN_NORMALIZERS
from web3.types import ABIEvent, ABIFunction
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import bytes_to_hex_str, convert_bytes_to_hex, convert_dict, hex_str_to_bytes
-from indexer.utils.abi import (
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import bytes_to_hex_str, convert_bytes_to_hex, convert_dict, hex_str_to_bytes
+from hemera.indexer.domains.log import Log
+from hemera.indexer.utils.abi import (
abi_address_to_hex,
abi_bytes_to_bytes,
abi_string_to_text,
codec,
+ encode_bool,
event_log_abi_to_topic,
function_abi_to_4byte_selector_str,
get_types_from_abi_type_list,
pad_address,
+ tuple_encode,
uint256_to_bytes,
)
@@ -116,8 +119,6 @@ def decode_log_ignore_indexed(
:return: A dictionary containing the decoded log data, or raise exception if decoding fails.
:rtype: Optional[Dict[str, Any]]
"""
- from indexer.domain.log import Log
-
if not isinstance(log, Log):
raise ValueError(f"log: {log} is not a Log instance")
@@ -143,7 +144,6 @@ def decode_log(
:return: A dictionary containing the decoded log data, or None if decoding fails.
:rtype: Optional[Dict[str, Any]]
"""
- from indexer.domain.log import Log
if not isinstance(log, Log):
raise ValueError(f"log: {log} is not a Log instance")
@@ -309,11 +309,29 @@ def encode_function_call_data(self, arguments: Sequence[Any]) -> str:
encoded += pad_address(arg)
elif arg_type == "uint256":
encoded += uint256_to_bytes(arg)
+ elif arg_type == "bool":
+ encoded += encode_bool(arg)
else:
# cannot handle, call encode directly
return encode_data(self._function_abi, arguments, self.get_signature())
return bytes_to_hex_str(encoded)
+ def encode_multicall_data(self, arguments: Sequence[Any]) -> str:
+ """For use with multicall.
+ This implementation should be 5x faster than eth_abi.encode, which is slow in this scenario.
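+ The fast paths cover the common multicall argument shapes
+ ``((address,bytes)[])`` and ``(bool,(address,bytes)[])`` (e.g. a multicall
+ ``aggregate`` / ``tryAggregate`` call); any other shape falls back to
+ ``encode_data``.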
+ """
+ if arguments is None:
+ arguments = []
+
+ encoded = hex_str_to_bytes(self._signature)
+ if len(arguments) == 1:
+ encoded += tuple_encode(arguments, ["(address,bytes)[]"])
+ elif len(arguments) == 2:
+ encoded += tuple_encode(arguments, ["bool", "(address,bytes)[]"])
+ else:
+ return encode_data(self._function_abi, arguments, self.get_signature())
+ return bytes_to_hex_str(encoded)
+
class FunctionCollection:
def __init__(self, functions: List[Function]):
diff --git a/common/utils/bridge_utils.py b/hemera/common/utils/bridge_utils.py
similarity index 99%
rename from common/utils/bridge_utils.py
rename to hemera/common/utils/bridge_utils.py
index 86a1786b7..05a518be5 100644
--- a/common/utils/bridge_utils.py
+++ b/hemera/common/utils/bridge_utils.py
@@ -1,5 +1,5 @@
-from api.app.config import AppConfig, TokenConfiguration
-from common.utils.exception_control import ErrorRollupError
+from hemera.api.app.config import AppConfig, TokenConfiguration
+from hemera.common.utils.exception_control import ErrorRollupError
class BridgeTransactionParser:
diff --git a/common/utils/cache_utils.py b/hemera/common/utils/cache_utils.py
similarity index 100%
rename from common/utils/cache_utils.py
rename to hemera/common/utils/cache_utils.py
diff --git a/common/utils/config.py b/hemera/common/utils/config.py
similarity index 79%
rename from common/utils/config.py
rename to hemera/common/utils/config.py
index bdf935c48..bfdd1bbe7 100644
--- a/common/utils/config.py
+++ b/hemera/common/utils/config.py
@@ -2,7 +2,7 @@
import os
from configparser import ConfigParser
-from api.app.config import AppConfig
+from hemera.api.app.config import AppConfig
_config_instance = None
_is_initialized = False
@@ -63,3 +63,14 @@ def read_config_value(config_file, section, key):
value = None
return value
+
+
+def check_and_set_default_env(key: str, default_value: str):
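+ """Set the environment variable ``key`` to ``default_value`` unless it is already set.
+
+ Example (illustrative):
+ >>> check_and_set_default_env("JOB_RETRIES", "5")
+ """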
+ env_value = os.environ.get(key)
+ if env_value is None:
+ os.environ[key] = default_value
+ else:
+ logging.warning(
+ f"The environment variable: {key} has been set to `{env_value}`. "
+ f"Please confirm that {key} assignment meets your expectations."
+ )
diff --git a/hemera/common/utils/db_utils.py b/hemera/common/utils/db_utils.py
new file mode 100644
index 000000000..00e7c1dbb
--- /dev/null
+++ b/hemera/common/utils/db_utils.py
@@ -0,0 +1,219 @@
+from datetime import datetime, timezone
+from decimal import Decimal
+from typing import List, Type, Union
+
+from sqlalchemy import text
+
+from hemera.common.models import HemeraModel, db
+from hemera.common.models.blocks import Blocks
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.config import get_config
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains import Domain, dict_to_dataclass
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.receipt import Receipt
+from hemera.indexer.domains.transaction import Transaction
+
+app_config = get_config()
+
+
+def build_entities(model, columns):
+ if columns == "*":
+ entities = [attr for attr in model.__table__.columns]
+ else:
+ entities = []
+ for column in columns:
+ if isinstance(column, tuple):
+ col, alias = column
+ entities.append(getattr(model, col).label(alias))
+ else:
+ entities.append(getattr(model, column))
+
+ return entities
+
+
+def get_total_row_count(table):
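+ # Read the planner's reltuples statistic from pg_class: an estimated row
+ # count that avoids a full COUNT(*) scan on large tables.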
+ estimate_transaction = db.session.execute(
+ text(
+ f"""
+ SELECT reltuples::bigint AS estimate FROM pg_class where oid = '{app_config.db_read_sql_alchemy_database_config.schema}.{table}'::regclass;
+ """
+ )
+ ).fetchone()
+ return estimate_transaction[0]
+
+
+def table_to_dataclass(row_instance, cls):
+ """
+ Converts a table row to a dataclass instance, handling nested structures.
+
+ Args:
+ row_instance (HemeraModel): The input data structure.
+ cls: The dataclass type to convert to.
+
+ Returns:
+ An instance of the dataclass corresponding to the table row.
+ """
+
+ dict_instance = {}
+ if hasattr(row_instance, "__table__"):
+ for column in row_instance.__table__.columns:
+ if column.name == "meta_data":
+ meta_data_json = getattr(row_instance, column.name)
+ if meta_data_json:
+ for key in meta_data_json:
+ dict_instance[key] = meta_data_json[key]
+ else:
+ value = getattr(row_instance, column.name)
+ dict_instance[column.name] = convert_value(value)
+ else:
+ for column, value in row_instance._asdict().items():
+ dict_instance[column] = convert_value(value)
+
+ domain = dict_to_dataclass(dict_instance, cls)
+ if cls is Transaction:
+ domain.fill_with_receipt(Receipt.from_pg(dict_instance))
+
+ return domain
+
+
+def convert_value(value):
+ if isinstance(value, datetime):
+ return int(round(value.replace(tzinfo=timezone.utc).timestamp()))
+ elif isinstance(value, Decimal):
+ return float(value)
+ elif isinstance(value, bytes):
+ return bytes_to_hex_str(value)
+ elif isinstance(value, memoryview):
+ return bytes_to_hex_str(bytes(value))
+ elif isinstance(value, list):
+ return [convert_value(v) for v in value]
+ elif isinstance(value, dict):
+ return {k: convert_value(v) for k, v in value.items()}
+ else:
+ return value
+
+
+def dataclass_builder(datas: list, domain: Type[Domain]):
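+ # Block, Transaction and Log need hierarchical assembly: transactions are
+ # attached to their parent block, and logs to their parent transaction's receipt.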
+ def build_block():
+ blocks = [table_to_dataclass(data, Block) for data in datas]
+ transactions = build_transaction()
+ blocks_mapping = {block.hash: block for block in blocks}
+
+ for block in blocks:
+ block.transactions = []
+
+ for transaction in transactions:
+ blocks_mapping[transaction.block_hash].transactions.append(transaction)
+
+ return blocks
+
+ def build_transaction():
+ transactions = [table_to_dataclass(data, Transaction) for data in datas]
+ logs = build_log()
+ transaction_mapping = {transaction.hash: transaction for transaction in transactions}
+
+ for log in logs:
+ transaction_mapping[log.transaction_hash].receipt.logs.append(log)
+
+ return transactions
+
+ def build_log():
+ logs = [table_to_dataclass(data, Log) for data in datas]
+
+ return logs
+
+ special_build = {
+ Block: build_block,
+ Transaction: build_transaction,
+ Log: build_log,
+ }
+
+ if domain in special_build:
+ domains = special_build[domain]()
+ else:
+ domains = [table_to_dataclass(data, domain) for data in datas]
+
+ return domains
+
+
+def require_data_as_domain(
+ service: PostgreSQLService,
+ table: HemeraModel,
+ domain: Type[Domain],
+ columns: Union[str, List[str]] = "*",
+) -> List[Domain]:
+ """Read entire data from table and assemeble as a list of domain objects.
+
+ This utility function fetches specified columns from a database table and converts
+ each row into a domain object.
+
+ Args:
+ service: PostgreSQL service instance for database connection
+ table: SQLAlchemy model class representing the database table
+ domain: Domain class to instantiate with the retrieved data
+ columns: Column names to retrieve, or "*" (the default) for all columns
+
+ Returns:
+ List of domain objects populated with the database data
+
+ Note:
+ - Automatically handles session management
+ - Converts SQL results to domain objects using dict_to_dataclass
+ - Closes database session even if an error occurs
+
+ Example:
+ >>> blocks = require_data_as_domain(
+ ... service=pg_service,
+ ... table=Blocks,
+ ... domain=Block
+ ... )
+ """
+
+ session = service.get_service_session()
+
+ entities = build_entities(table, columns)
+
+ try:
+ datas = session.query(table).with_entities(*entities).all()
+ finally:
+ session.close()
+
+ domains = dataclass_builder(datas, domain)
+ return domains
+
+
+def build_domains_by_sql(service: PostgreSQLService, domain: Type[Domain], sql: str) -> List[Domain]:
+ """Read data by given sql and assemeble as a list of domain objects.
+
+ This utility function executes a raw SQL query and assembles each result row
+ into a domain object.
+
+ Args:
+ service: PostgreSQL service instance for database connection
+ domain: Domain class to instantiate with the query results
+ sql: Raw SQL query string to execute
+
+ Returns:
+ List of domain objects populated with the query results
+
+ Note:
+ - Ensure SQL query returns columns that match domain class fields
+
+ Example:
+ >>> txs = build_domains_by_sql(
+ ... service=pg_service,
+ ... domain=Transaction,
+ ... sql="SELECT hash, from_address, to_address FROM transactions WHERE block_number > 1000 limit 100"
+ ... )
+ """
+ session = service.get_service_session()
+
+ try:
+ datas = session.execute(text(sql))
+ finally:
+ session.close()
+
+ domains = dataclass_builder(datas, domain)
+ return domains
diff --git a/common/utils/exception_control.py b/hemera/common/utils/exception_control.py
similarity index 85%
rename from common/utils/exception_control.py
rename to hemera/common/utils/exception_control.py
index 2287173d8..77108fc43 100644
--- a/common/utils/exception_control.py
+++ b/hemera/common/utils/exception_control.py
@@ -1,4 +1,6 @@
import logging
+import sys
+import traceback
from werkzeug.exceptions import HTTPException
@@ -95,9 +97,15 @@ def decode_response_error(error):
return None
if "out of gas" in message:
return None
-
+ if "Invalid request" in message:
+ return None
if "InvalidJump" in message:
return None
+ if "revert" in message:
+ return None
+
+ if "EVM" in message:
+ return None
if (
message == "execution reverted"
@@ -118,3 +126,15 @@ def decode_response_error(error):
raise RetriableError(message)
else:
return None
+
+
+def get_exception_details(e: Exception) -> dict:
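+ """Collect structured details about the exception currently being handled.
+
+ Relies on sys.exc_info(), so call it from inside an ``except`` block.
+ Illustrative usage (``risky()`` is a placeholder):
+
+ try:
+ risky()
+ except Exception as e:
+ details = get_exception_details(e)
+ """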
+ exc_type, exc_value, exc_traceback = sys.exc_info()
+
+ return {
+ "type": exc_type.__name__ if exc_type else None,
+ "module": exc_type.__module__ if exc_type else None,
+ "message": str(exc_value) if exc_value else str(e),
+ "traceback": traceback.format_exc(),
+ "line_number": exc_traceback.tb_lineno if exc_traceback else None,
+ }
diff --git a/common/utils/file_utils.py b/hemera/common/utils/file_utils.py
similarity index 93%
rename from common/utils/file_utils.py
rename to hemera/common/utils/file_utils.py
index d68f66654..7890204fc 100644
--- a/common/utils/file_utils.py
+++ b/hemera/common/utils/file_utils.py
@@ -94,6 +94,12 @@ def delete_file(file):
raise OSError
+def get_project_root():
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ project_root = os.path.abspath(os.path.join(current_dir, "..", "..", ".."))
+ return project_root
+
+
class NoopFile:
def __enter__(self):
pass
diff --git a/common/utils/format_utils.py b/hemera/common/utils/format_utils.py
similarity index 92%
rename from common/utils/format_utils.py
rename to hemera/common/utils/format_utils.py
index f95c66c67..f474e3455 100644
--- a/common/utils/format_utils.py
+++ b/hemera/common/utils/format_utils.py
@@ -92,6 +92,34 @@ def to_snake_case(name: str) -> str:
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
+def to_camel_case(name: str) -> str:
+ """
+ Converts a snake_case string to CamelCase.
+
+ :param name: The snake_case string to convert.
+ :type name: str
+
+ :return: The converted CamelCase string.
+ :rtype: str
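+
+ Example:
+ >>> to_camel_case("block_number")
+ 'BlockNumber'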
+ """
+ components = name.split("_")
+ return "".join(x.title() for x in components)
+
+
+def to_space_camel_case(name: str) -> str:
+ """
+ Converts a snake_case string to space-separated Title Case words.
+
+ :param name: The snake_case string to convert.
+ :type name: str
+
+ :return: The converted space-separated string.
+ :rtype: str
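+
+ Example:
+ >>> to_space_camel_case("block_number")
+ 'Block Number'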
+ """
+ components = name.split("_")
+ return " ".join(x.title() for x in components)
+
+
def as_dict(self):
"""
Converts an SQLAlchemy model instance to a dictionary.
diff --git a/common/utils/module_loading.py b/hemera/common/utils/module_loading.py
similarity index 92%
rename from common/utils/module_loading.py
rename to hemera/common/utils/module_loading.py
index e86002cc8..0c6e52af2 100644
--- a/common/utils/module_loading.py
+++ b/hemera/common/utils/module_loading.py
@@ -1,10 +1,13 @@
import ast
import glob
+import logging
import os
import pkgutil
from importlib import import_module
from typing import Dict, List, Type
+from hemera.common.utils.file_utils import get_project_root
+
def import_string(dotted_path: str):
"""
@@ -28,8 +31,7 @@ def import_string(dotted_path: str):
def scan_subclass_by_path_patterns(
path_patterns: List[str], base_class: Type[object], exclude_path=[]
) -> Dict[str, dict]:
- current_dir = os.path.dirname(os.path.abspath(__file__))
- project_root = os.path.abspath(os.path.join(current_dir, "..", ".."))
+ project_root = get_project_root()
exclude_path = [os.path.join(project_root, path) for path in exclude_path]
mapping = {}
@@ -78,7 +80,11 @@ def recurse(cls):
def import_submodules(package_name):
- package = import_module(package_name)
+ try:
+ package = import_module(package_name)
+ except ImportError:
+ logging.warning(f"Failed to import {package_name}")
+ return
for _, name, is_pkg in pkgutil.walk_packages(package.__path__):
full_name = package.__name__ + "." + name
import_module(full_name)
diff --git a/common/utils/web3_utils.py b/hemera/common/utils/web3_utils.py
similarity index 100%
rename from common/utils/web3_utils.py
rename to hemera/common/utils/web3_utils.py
diff --git a/indexer/controller/__init__.py b/hemera/indexer/__init__.py
similarity index 100%
rename from indexer/controller/__init__.py
rename to hemera/indexer/__init__.py
diff --git a/indexer/controller/dispatcher/__init__.py b/hemera/indexer/cache/__init__.py
similarity index 100%
rename from indexer/controller/dispatcher/__init__.py
rename to hemera/indexer/cache/__init__.py
diff --git a/indexer/cache/cache_dict.py b/hemera/indexer/cache/cache_dict.py
similarity index 100%
rename from indexer/cache/cache_dict.py
rename to hemera/indexer/cache/cache_dict.py
diff --git a/indexer/controller/scheduler/__init__.py b/hemera/indexer/controller/__init__.py
similarity index 100%
rename from indexer/controller/scheduler/__init__.py
rename to hemera/indexer/controller/__init__.py
diff --git a/indexer/controller/base_controller.py b/hemera/indexer/controller/base_controller.py
similarity index 100%
rename from indexer/controller/base_controller.py
rename to hemera/indexer/controller/base_controller.py
diff --git a/indexer/executors/__init__.py b/hemera/indexer/controller/dispatcher/__init__.py
similarity index 100%
rename from indexer/executors/__init__.py
rename to hemera/indexer/controller/dispatcher/__init__.py
diff --git a/indexer/controller/dispatcher/base_dispatcher.py b/hemera/indexer/controller/dispatcher/base_dispatcher.py
similarity index 100%
rename from indexer/controller/dispatcher/base_dispatcher.py
rename to hemera/indexer/controller/dispatcher/base_dispatcher.py
diff --git a/indexer/controller/reorg_controller.py b/hemera/indexer/controller/reorg_controller.py
similarity index 92%
rename from indexer/controller/reorg_controller.py
rename to hemera/indexer/controller/reorg_controller.py
index 83e1291f3..9fbdf65cc 100644
--- a/indexer/controller/reorg_controller.py
+++ b/hemera/indexer/controller/reorg_controller.py
@@ -5,26 +5,29 @@
from sqlalchemy import and_, update
from sqlalchemy.dialects.postgresql import insert
-from common.models.blocks import Blocks
-from common.models.fix_record import FixRecord
-from common.utils.exception_control import HemeraBaseException
-from common.utils.format_utils import hex_str_to_bytes
-from common.utils.web3_utils import build_web3
-from indexer.controller.base_controller import BaseController
-from indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.common.models.blocks import Blocks
+from hemera.common.models.fix_record import FixRecord
+from hemera.common.utils.exception_control import HemeraBaseException
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.common.utils.web3_utils import build_web3
+from hemera.indexer.controller.base_controller import BaseController
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
exception_recorder = ExceptionRecorder()
class ReorgController(BaseController):
- def __init__(self, batch_web3_provider, job_scheduler, ranges, config, max_retries=5):
+ def __init__(self, batch_web3_provider, job_scheduler, ranges, service, max_retries=5):
self.ranges = ranges
self.web3 = build_web3(batch_web3_provider)
- self.db_service = config.get("db_service")
+ self.db_service = service
self.job_scheduler = job_scheduler
self.max_retries = max_retries
+ def get_current_block_number(self):
+ return int(self.web3.eth.block_number)
+
def action(self, job_id=None, block_number=None, remains=None, retry_errors=True):
if block_number is None:
raise ValueError("Reorging mission must provide a block_number.")
@@ -104,7 +107,7 @@ def _do_fixing(self, fix_block, retry_errors=True):
break
except HemeraBaseException as e:
- logging.exception(f"An rpc response exception occurred while syncing block data. error: {e}")
+ logging.exception(f"An expected exception occurred while syncing block data. error: {e}")
if e.crashable:
logging.exception("Mission will crash immediately.")
raise e
diff --git a/indexer/exporters/__init__.py b/hemera/indexer/controller/scheduler/__init__.py
similarity index 100%
rename from indexer/exporters/__init__.py
rename to hemera/indexer/controller/scheduler/__init__.py
diff --git a/indexer/controller/scheduler/job_scheduler.py b/hemera/indexer/controller/scheduler/job_scheduler.py
similarity index 59%
rename from indexer/controller/scheduler/job_scheduler.py
rename to hemera/indexer/controller/scheduler/job_scheduler.py
index a2fb807e0..27dbd12fe 100644
--- a/indexer/controller/scheduler/job_scheduler.py
+++ b/hemera/indexer/controller/scheduler/job_scheduler.py
@@ -1,45 +1,80 @@
+import io
import logging
+import os
+import time
from collections import defaultdict, deque
-from typing import List, Set, Type
+from distutils.util import strtobool
+from typing import List, Set, Type, Union
+import pandas as pd
from pottery import RedisDict
from redis.client import Redis
+from tqdm import tqdm
-from common.models.tokens import Tokens
-from common.utils.format_utils import bytes_to_hex_str
-from common.utils.module_loading import import_submodules
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.jobs import CSVSourceJob
-from indexer.jobs.base_job import (
+from hemera.common.utils.exception_control import HemeraBaseException
+from hemera.common.utils.module_loading import import_submodules
+from hemera.indexer.jobs import CSVSourceJob
+from hemera.indexer.jobs.base_job import (
BaseExportJob,
BaseJob,
ExtensionJob,
FilterTransactionDataJob,
generate_dependency_types,
)
-from indexer.jobs.check_block_consensus_job import CheckBlockConsensusJob
-from indexer.jobs.export_blocks_job import ExportBlocksJob
-from indexer.jobs.source_job.pg_source_job import PGSourceJob
+from hemera.indexer.jobs.export_blocks_job import ExportBlocksJob
+from hemera.indexer.jobs.source_job.pg_source_job import PGSourceJob
+from hemera.indexer.utils.buffer_service import BufferService
-import_submodules("indexer.modules")
+JOB_RETRIES = int(os.environ.get("JOB_RETRIES", "5"))
+PGSOURCE_ACCURACY = bool(strtobool(os.environ.get("PGSOURCE_ACCURACY", "false")))
def get_tokens_from_db(service):
- with service.session_scope() as s:
- dict = {}
- result = s.query(Tokens).all()
- if result is not None:
- for token in result:
- dict[bytes_to_hex_str(token.address)] = {
- "address": bytes_to_hex_str(token.address),
- "token_type": token.token_type,
- "name": token.name,
- "symbol": token.symbol,
- "decimals": int(token.decimals) if token.decimals is not None else None,
- "block_number": token.block_number,
- "total_supply": int(token.total_supply) if token.total_supply is not None else None,
- }
- return dict
+ with service.cursor_scope() as cur:
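+ # Bulk-load the tokens table with COPY and parse it via pandas; this avoids
+ # per-row ORM overhead on large tables.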
+ csv_data = io.StringIO()
+ copy_query = "COPY tokens TO STDOUT WITH CSV HEADER"
+ cur.copy_expert(copy_query, csv_data)
+ csv_data.seek(0)
+
+ dtype = {
+ "address": str,
+ "token_type": str,
+ "name": str,
+ "symbol": str,
+ "decimals": str,
+ "total_supply": str,
+ }
+
+ df = pd.read_csv(csv_data, dtype=dtype)
+
+ for col in ["no_balance_of", "no_total_supply"]:
+ if col in df.columns:
+ df[col] = df[col].astype(str).str.lower().isin(["t", "true", "1"])
+
+ int_columns = ["fail_balance_of_count", "succeed_balance_of_count", "fail_total_supply_count", "block_number"]
+ for col in int_columns:
+ if col in df.columns:
+ df[col] = df[col].fillna(0).astype(float).astype(int)
+ df["address"] = df["address"].str.replace(r"\\x", "0x", regex=True)
+
+ token_dict = {}
+ for row in tqdm(df.itertuples(), total=len(df), desc="Loading tokens"):
+ address = row.address
+ token_dict[address] = {
+ "address": address,
+ "token_type": row.token_type,
+ "name": row.name,
+ "symbol": row.symbol,
+ "decimals": int(row.decimals) if pd.notna(row.decimals) else None,
+ "total_supply": int(row.total_supply) if pd.notna(row.total_supply) else None,
+ "no_total_supply": row.no_total_supply,
+ "fail_total_supply_count": row.fail_total_supply_count,
+ "no_balance_of": row.no_balance_of,
+ "fail_balance_of_count": row.fail_balance_of_count,
+ "succeed_balance_of_count": row.succeed_balance_of_count,
+ "block_number": row.block_number,
+ }
+ return token_dict
def get_source_job_type(source_path: str):
@@ -60,24 +95,27 @@ def __init__(
debug_batch_size=1,
max_workers=5,
config={},
- item_exporters=[ConsoleItemExporter()],
+ buffer_service: Union[dict, BufferService] = defaultdict(list),
required_output_types=[],
required_source_types=[],
cache="memory",
multicall=None,
auto_reorg=True,
force_filter_mode=False,
+ metrics=None,
):
+ import_submodules("hemera_udf")
self.logger = logging.getLogger(__name__)
self.auto_reorg = auto_reorg
self.batch_web3_provider = batch_web3_provider
self.batch_web3_debug_provider = batch_web3_debug_provider
- self.item_exporters = item_exporters
+ self.buffer_service = buffer_service
self.batch_size = batch_size
self._is_multicall = multicall
self.debug_batch_size = debug_batch_size
self.max_workers = max_workers
self.config = config
+ self.metrics = metrics
required_output_types.sort(key=lambda x: x.type())
self.required_output_types = required_output_types
self.required_source_types = required_source_types
@@ -114,6 +152,51 @@ def __init__(
for output_type in self.required_output_types:
self.logger.info(f"[*] {output_type.type()}")
+ def clear_data_buff(self):
+ BaseJob._data_buff.clear()
+
+ def get_data_buff(self):
+ return BaseJob._data_buff
+
+ def discover_and_register_job_classes(self):
+ discovered_job_classes = BaseExportJob.discover_jobs()
+ discovered_job_classes.extend(ExtensionJob.discover_jobs())
+
+ for job in discovered_job_classes:
+ generate_dependency_types(job)
+
+ if self.load_from_source:
+ source_job = get_source_job_type(source_path=self.load_from_source)
+ if source_job is PGSourceJob:
+ source_job.output_types = self.required_source_types
+ all_subclasses = [source_job]
+
+ source_output_types = set(source_job.output_types)
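+ # Skip jobs whose outputs the source can already provide; when
+ # PGSOURCE_ACCURACY is enabled the job is kept and recomputes its
+ # outputs instead of trusting the source data.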
+ for job in discovered_job_classes:
+ skip = False
+ for output_type in job.output_types:
+ if output_type in source_output_types:
+ if not PGSOURCE_ACCURACY:
+ source_job.output_types = list(set(job.output_types + list(source_output_types)))
+ skip = True
+ break
+ if not skip:
+ all_subclasses.append(job)
+
+ else:
+ all_subclasses = discovered_job_classes
+
+ for cls in all_subclasses:
+ self.job_classes.append(cls)
+ for output in cls.output_types:
+ if output.type() in self.job_map:
+ raise Exception(
+ f"Duplicated output type: {output.type()}, job: {cls.__name__}, existing: {self.job_map[output.type()]}, plz check your job definition"
+ )
+ self.job_map[output.type()].append(cls)
+ for dependency in cls.dependency_types:
+ self.dependency_map[dependency.type()].append(cls)
+
def get_required_job_classes(self, output_types) -> (List[Type[BaseJob]], bool):
required_job_classes = set()
output_type_queue = deque(output_types)
@@ -147,50 +230,47 @@ def get_required_job_classes(self, output_types) -> (List[Type[BaseJob]], bool):
required_job_classes.add(job_class)
for dependency in job_class.dependency_types:
output_type_queue.append(dependency)
+
+ if len(required_job_classes) == 0:
+ raise Exception(
+                "No job classes were required. Possible reasons: "
+                "1. The UDF job is not recognized by the indexer. "
+                "2. The input dependencies and output dataclasses are not correctly bound to the UDF job. "
+                "3. DynamicEntityTypeRegistry failed to register correctly."
+ )
+
return required_job_classes, is_filter
- def clear_data_buff(self):
- BaseJob._data_buff.clear()
+ def resolve_dependencies(self, required_jobs: Set[Type[BaseJob]]) -> List[Type[BaseJob]]:
+ sorted_order = []
+ job_graph = defaultdict(list)
+ in_degree = defaultdict(int)
- def get_data_buff(self):
- return BaseJob._data_buff
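+        # Build the dependency graph: an edge parent -> child means the child consumes one of the parent's output types.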
+ for job_class in required_jobs:
+ for dependency in job_class.dependency_types:
+ for parent_class in self.job_map[dependency.type()]:
+ if parent_class in required_jobs:
+ job_graph[parent_class].append(job_class)
+ in_degree[job_class] += 1
- def discover_and_register_job_classes(self):
- if self.load_from_source:
- source_job = get_source_job_type(source_path=self.load_from_source)
- if source_job is PGSourceJob:
- source_job.output_types = self.required_source_types
- all_subclasses = [source_job]
+ sources = deque([job_class for job_class in required_jobs if in_degree[job_class] == 0])
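+        # Kahn's topological sort: start from jobs with no unmet dependencies and release children as their in-degree drops to zero.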
- source_output_types = set(source_job.output_types)
- for export_job in BaseExportJob.discover_jobs():
- generate_dependency_types(export_job)
- skip = False
- for output_type in export_job.output_types:
- if output_type in source_output_types:
- source_job.output_types = list(set(export_job.output_types + list(source_output_types)))
- skip = True
- break
- if not skip:
- all_subclasses.append(export_job)
+ while sources:
+ job_class = sources.popleft()
+ sorted_order.append(job_class)
+ for child_class in job_graph[job_class]:
+ in_degree[child_class] -= 1
+ if in_degree[child_class] == 0:
+ sources.append(child_class)
- else:
- all_subclasses = BaseExportJob.discover_jobs()
+ if len(sorted_order) != len(required_jobs):
+ raise Exception("Dependency cycle detected")
- all_subclasses.extend(ExtensionJob.discover_jobs())
- for cls in all_subclasses:
- generate_dependency_types(cls)
- self.job_classes.append(cls)
- for output in cls.output_types:
- if output.type() in self.job_map:
- raise Exception(
- f"Duplicated output type: {output.type()}, job: {cls.__name__}, existing: {self.job_map[output.type()]}, plz check your job definition"
- )
- self.job_map[output.type()].append(cls)
- for dependency in cls.dependency_types:
- self.dependency_map[dependency.type()].append(cls)
+ return sorted_order
def instantiate_jobs(self):
+ BaseJob._data_buff = self.buffer_service
+
filters = []
for job_class in self.resolved_job_classes:
if job_class is ExportBlocksJob or job_class is PGSourceJob:
@@ -199,7 +279,6 @@ def instantiate_jobs(self):
required_output_types=self.required_output_types,
batch_web3_provider=self.batch_web3_provider,
batch_web3_debug_provider=self.batch_web3_debug_provider,
- item_exporters=self.item_exporters,
batch_size=self.batch_size,
multicall=self._is_multicall,
debug_batch_size=self.debug_batch_size,
@@ -216,7 +295,6 @@ def instantiate_jobs(self):
required_output_types=self.required_output_types,
batch_web3_provider=self.batch_web3_provider,
batch_web3_debug_provider=self.batch_web3_debug_provider,
- item_exporters=self.item_exporters,
batch_size=self.batch_size,
multicall=self._is_multicall,
debug_batch_size=self.debug_batch_size,
@@ -226,12 +304,12 @@ def instantiate_jobs(self):
filters=filters,
)
self.jobs.insert(0, export_blocks_job)
- else:
+
+ if PGSourceJob in self.resolved_job_classes:
pg_source_job = PGSourceJob(
required_output_types=self.required_output_types,
batch_web3_provider=self.batch_web3_provider,
batch_web3_debug_provider=self.batch_web3_debug_provider,
- item_exporters=self.item_exporters,
batch_size=self.batch_size,
multicall=self._is_multicall,
debug_batch_size=self.debug_batch_size,
@@ -242,59 +320,65 @@ def instantiate_jobs(self):
)
self.jobs.insert(0, pg_source_job)
- if self.auto_reorg:
- check_job = CheckBlockConsensusJob(
- required_output_types=self.required_output_types,
- batch_web3_provider=self.batch_web3_provider,
- batch_web3_debug_provider=self.batch_web3_debug_provider,
- item_exporters=self.item_exporters,
- batch_size=self.batch_size,
- multicall=self._is_multicall,
- debug_batch_size=self.debug_batch_size,
- max_workers=self.max_workers,
- config=self.config,
- filters=filters,
- )
- self.jobs.append(check_job)
+ def get_scheduled_jobs(self):
+ return self.jobs
def run_jobs(self, start_block, end_block):
self.clear_data_buff()
- try:
- for job in self.jobs:
- job.run(start_block=start_block, end_block=end_block)
-
- for output_type in self.required_output_types:
- message = f"{output_type.type()} : {len(self.get_data_buff().get(output_type.type())) if self.get_data_buff().get(output_type.type()) else 0}"
- self.logger.info(f"{message}")
- except Exception as e:
- raise e
- finally:
- pass
+ total_start = time.time()
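+        # Run jobs in dependency order, reporting per-job and total durations in milliseconds when metrics are enabled.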
+ for job in self.jobs:
+ job_start = time.time()
+            self.job_with_retries(job, start_block=start_block, end_block=end_block)
- def resolve_dependencies(self, required_jobs: Set[Type[BaseJob]]) -> List[Type[BaseJob]]:
- sorted_order = []
- job_graph = defaultdict(list)
- in_degree = defaultdict(int)
+ if self.metrics:
+ self.metrics.update_job_processing_duration(
+ job_name=job.__class__.__name__,
+ duration=int((time.time() - job_start) * 1000),
+ )
- for job_class in required_jobs:
- for dependency in job_class.dependency_types:
- for parent_class in self.job_map[dependency.type()]:
- if parent_class in required_jobs:
- job_graph[parent_class].append(job_class)
- in_degree[job_class] += 1
-
- sources = deque([job_class for job_class in required_jobs if in_degree[job_class] == 0])
+ if self.metrics:
+ self.metrics.update_total_processing_duration(
+ duration=int((time.time() - total_start) * 1000),
+ )
- while sources:
- job_class = sources.popleft()
- sorted_order.append(job_class)
- for child_class in job_graph[job_class]:
- in_degree[child_class] -= 1
- if in_degree[child_class] == 0:
- sources.append(child_class)
+ for output_type in self.required_output_types:
+            buff = self.get_data_buff().get(output_type.type())
+            self.logger.info(f"{output_type.type()} : {len(buff) if buff else 0}")
- if len(sorted_order) != len(required_jobs):
- raise Exception("Dependency cycle detected")
+    def job_with_retries(self, job, start_block, end_block):
+ for retry in range(JOB_RETRIES + 1):
+ try:
+ self.logger.info(f"Task run {job.__class__.__name__}")
+ job.run(start_block=start_block, end_block=end_block)
- return sorted_order
+ if self.metrics and retry > 0:
+ self.metrics.update_job_processing_retry(job_name=job.__class__.__name__, retry=retry)
+
+ return
+
+ except HemeraBaseException as e:
+ self.logger.error(f"An expected exception occurred while running {job.__class__.__name__}. error: {e}")
+ if e.crashable:
+                    self.logger.error("The job will crash immediately.")
+ raise e
+
+ if e.retriable:
+ if retry == JOB_RETRIES:
+                        self.logger.info(f"The retry limit of {JOB_RETRIES} has been reached.")
+ else:
+                        self.logger.info(f"Retry {retry + 1} of {JOB_RETRIES} is about to start.")
+ else:
+                    self.logger.error("The job will not retry and will exit immediately.")
+ raise e
+
+ except Exception as e:
+ self.logger.error(f"An unknown exception occurred while running {job.__class__.__name__}. error: {e}")
+ if self.metrics:
+ self.metrics.update_instance_shutdown()
+ raise e
+
+ self.logger.error(
+            f"The job with parameters start_block:{start_block}, end_block:{end_block} "
+            f"could not be resumed automatically after reaching the retry limit. Program will exit."
+ )
diff --git a/indexer/controller/scheduler/reorg_scheduler.py b/hemera/indexer/controller/scheduler/reorg_scheduler.py
similarity index 94%
rename from indexer/controller/scheduler/reorg_scheduler.py
rename to hemera/indexer/controller/scheduler/reorg_scheduler.py
index f1c88c651..ebe32704f 100644
--- a/indexer/controller/scheduler/reorg_scheduler.py
+++ b/hemera/indexer/controller/scheduler/reorg_scheduler.py
@@ -5,15 +5,13 @@
from pottery import RedisDict
from redis.client import Redis
-from common.models.tokens import Tokens
-from common.utils.format_utils import bytes_to_hex_str
-from common.utils.module_loading import import_submodules
-from indexer.jobs import FilterTransactionDataJob
-from indexer.jobs.base_job import BaseExportJob, BaseJob, ExtensionJob
-from indexer.jobs.export_blocks_job import ExportBlocksJob
-from indexer.jobs.export_reorg_job import ExportReorgJob
-
-import_submodules("indexer.modules")
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.common.utils.module_loading import import_submodules
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.jobs.base_job import BaseExportJob, BaseJob, ExtensionJob
+from hemera.indexer.jobs.export_blocks_job import ExportBlocksJob
+from hemera.indexer.jobs.export_reorg_job import ExportReorgJob
def get_tokens_from_db(service):
diff --git a/indexer/controller/stream_controller.py b/hemera/indexer/controller/stream_controller.py
similarity index 70%
rename from indexer/controller/stream_controller.py
rename to hemera/indexer/controller/stream_controller.py
index cc6f27f6d..32de09593 100644
--- a/indexer/controller/stream_controller.py
+++ b/hemera/indexer/controller/stream_controller.py
@@ -4,16 +4,18 @@
import mpire
-from common.utils.exception_control import FastShutdownError, HemeraBaseException
-from common.utils.file_utils import delete_file, write_to_file
-from common.utils.web3_utils import build_web3
-from indexer.controller.base_controller import BaseController
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.utils.limit_reader import LimitReader
-from indexer.utils.sync_recorder import BaseRecorder
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.file_utils import delete_file, write_to_file
+from hemera.common.utils.web3_utils import build_web3
+from hemera.indexer.controller.base_controller import BaseController
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.utils.limit_reader import LimitReader
+from hemera.indexer.utils.sync_recorder import BaseRecorder
logger = logging.getLogger(__name__)
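+# Retry budget for job execution; override with the JOB_RETRIES environment variable (default 5).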
+JOB_RETRIES = int(os.environ.get("JOB_RETRIES", "5"))
+
class StreamController(BaseController):
@@ -23,7 +25,7 @@ def __init__(
sync_recorder: BaseRecorder,
job_scheduler: JobScheduler,
limit_reader: LimitReader,
- max_retries=5,
+ metrics,
retry_from_record=False,
delay=0,
process_numbers=1,
@@ -35,10 +37,12 @@ def __init__(
self.sync_recorder = sync_recorder
self.job_scheduler = job_scheduler
self.limit_reader = limit_reader
- self.max_retries = max_retries
+ self.max_retries = JOB_RETRIES
self.retry_from_record = retry_from_record
self.delay = delay
+ self.buffer_service = job_scheduler.buffer_service
+
self.process_numbers = process_numbers
self.process_size = process_size
self.process_time_out = process_time_out
@@ -47,6 +51,8 @@ def __init__(
else:
self.pool = mpire.WorkerPool(n_jobs=self.process_numbers, use_dill=True, keep_alive=True)
+ self.metrics = metrics
+
def action(
self,
start_block=None,
@@ -62,6 +68,7 @@ def action(
write_to_file(pid_file, str(os.getpid()))
last_synced_block = self.sync_recorder.get_last_synced_block()
+ self.metrics.update_last_sync_record(last_synced_block)
if start_block is not None:
if (
@@ -98,9 +105,15 @@ def action(
else:
splits = self.split_blocks(last_synced_block + 1, target_block, self.process_size)
self.pool.map(func=self._do_stream, iterable_of_args=splits, task_timeout=self.process_time_out)
- logger.info("Writing last synced block {}".format(target_block))
- self.sync_recorder.set_last_synced_block(target_block)
+                    # In a multiprocess environment, make sure last_synced_block is recorded correctly.
+ self.metrics.update_last_sync_record(target_block)
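+                    # BufferService.success_callback finalizes everything buffered up to target_block,
+                    # replacing the old direct sync_recorder.set_last_synced_block write.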
+ self.buffer_service.success_callback(target_block)
+
last_synced_block = target_block
+ if self.buffer_service.is_shutdown():
+ raise FastShutdownError(
+                        "BufferService was shut down for some reason; the indexer will exit immediately."
+ )
if synced_blocks <= 0:
logger.info("Nothing to sync. Sleeping for {} seconds...".format(period_seconds))
@@ -121,36 +134,7 @@ def split_blocks(self, start_block, end_block, step):
return blocks
def _do_stream(self, start_block, end_block):
-
- for retry in range(self.max_retries + 1):
- try:
- # ETL program's main logic
- self.job_scheduler.run_jobs(start_block, end_block)
- return
-
- except HemeraBaseException as e:
- logger.error(f"An expected exception occurred while syncing block data. error: {e}")
- if e.crashable:
- logger.error("Mission will crash immediately.")
- raise e
-
- if e.retriable:
- if retry == self.max_retries:
- logger.info(f"The number of retry is reached limit {self.max_retries}.")
- else:
- logger.info(f"No: {retry} retry is about to start.")
- else:
- logger.error("Mission will not retry, and exit immediately.")
- raise e
-
- except Exception as e:
- logger.error(f"An unknown exception occurred while syncing block data. error: {e}")
- raise e
-
- logger.error(
- f"The job with parameters start_block:{start_block}, end_block:{end_block} "
- f"can't be automatically resumed after reached out limit of retries. Program will exit."
- )
+ self.job_scheduler.run_jobs(start_block, end_block)
def _get_current_block_number(self):
return int(self.web3.eth.block_number)
diff --git a/indexer/domain/__init__.py b/hemera/indexer/domains/__init__.py
similarity index 84%
rename from indexer/domain/__init__.py
rename to hemera/indexer/domains/__init__.py
index 6825db593..fc775361a 100644
--- a/indexer/domain/__init__.py
+++ b/hemera/indexer/domains/__init__.py
@@ -1,14 +1,8 @@
from dataclasses import asdict, dataclass, fields, is_dataclass
from typing import Any, Dict, Union, get_args, get_origin
-from common.utils.format_utils import to_snake_case
-from common.utils.module_loading import import_string, scan_subclass_by_path_patterns
-
-model_path_patterns = [
- "indexer/domain",
- "indexer/modules/*/domain",
- "indexer/modules/custom/*/domain",
-]
+from hemera.common.utils.format_utils import to_snake_case
+from hemera.common.utils.module_loading import import_string, scan_subclass_by_path_patterns
class DomainMeta(type):
@@ -66,18 +60,6 @@ def dict_to_entity(self, data_dict: Dict[str, Any]):
for key, value in filtered_data.items():
setattr(self, key, value)
- @classmethod
- def is_filter_data(cls):
- return False
-
-
-@dataclass
-class FilterData(Domain):
-
- @classmethod
- def is_filter_data(cls):
- return True
-
from typing import Dict
@@ -138,13 +120,10 @@ def dataclass_to_dict(instance: Domain) -> Dict[str, Any]:
def generate_domains_mapping():
mapping = {}
- for domain in __domain_imports.keys():
- mapping[to_snake_case(domain)] = import_string(__domain_imports[domain])
+    subclasses = DomainMeta.get_all_subclasses_with_type()
+    for domain, import_path in subclasses.items():
+        mapping[to_snake_case(domain)] = import_string(import_path)
return mapping
-__domain_imports = {
- k: v["cls_import_path"] for k, v in scan_subclass_by_path_patterns(model_path_patterns, Domain).items()
-}
domains_mapping = generate_domains_mapping()
diff --git a/indexer/domain/block.py b/hemera/indexer/domains/block.py
similarity index 96%
rename from indexer/domain/block.py
rename to hemera/indexer/domains/block.py
index bbec0fdc7..61b123e02 100644
--- a/indexer/domain/block.py
+++ b/hemera/indexer/domains/block.py
@@ -3,8 +3,8 @@
from eth_utils import to_int, to_normalized_address
-from indexer.domain import Domain
-from indexer.domain.transaction import Transaction
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.transaction import Transaction
@dataclass
diff --git a/indexer/domain/block_ts_mapper.py b/hemera/indexer/domains/block_ts_mapper.py
similarity index 86%
rename from indexer/domain/block_ts_mapper.py
rename to hemera/indexer/domains/block_ts_mapper.py
index 98392533a..2a74318b9 100644
--- a/indexer/domain/block_ts_mapper.py
+++ b/hemera/indexer/domains/block_ts_mapper.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Tuple
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/coin_balance.py b/hemera/indexer/domains/coin_balance.py
similarity index 85%
rename from indexer/domain/coin_balance.py
rename to hemera/indexer/domains/coin_balance.py
index a5db10f7d..07e37f7c4 100644
--- a/indexer/domain/coin_balance.py
+++ b/hemera/indexer/domains/coin_balance.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/hemera/indexer/domains/contract.py b/hemera/indexer/domains/contract.py
new file mode 100644
index 000000000..8c53e54e4
--- /dev/null
+++ b/hemera/indexer/domains/contract.py
@@ -0,0 +1,78 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.transaction import Transaction
+
+
+@dataclass
+class Contract(Domain):
+ address: str
+ name: str
+ contract_creator: str
+ creation_code: str
+ deployed_code: str
+ block_number: int
+ block_hash: str
+ block_timestamp: int
+ transaction_index: int
+ transaction_hash: str
+ transaction_from_address: str
+
+ def __init__(self, contract: dict):
+ self.dict_to_entity(contract)
+
+ def fill_transaction_from_address(self, address: str):
+ self.transaction_from_address = address
+
+
+def extract_contract_from_trace(trace):
+ contract = {
+ "address": trace.to_address,
+ "contract_creator": trace.from_address,
+ "creation_code": trace.input,
+ "deployed_code": trace.output,
+ "block_number": trace.block_number,
+ "block_hash": trace.block_hash,
+ "block_timestamp": trace.block_timestamp,
+ "transaction_index": trace.transaction_index,
+ "transaction_hash": trace.transaction_hash,
+ }
+
+ return contract
+
+
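+# Intentionally mirrors Contract: the scheduler rejects two jobs sharing an output type,
+# so the transaction-based export job emits its own domain class.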
+@dataclass
+class ContractFromTransaction(Domain):
+ address: str
+ name: str
+ contract_creator: str
+ creation_code: str
+ deployed_code: str
+ block_number: int
+ block_hash: str
+ block_timestamp: int
+ transaction_index: int
+ transaction_hash: str
+ transaction_from_address: str
+
+ def __init__(self, contract: dict):
+ self.dict_to_entity(contract)
+
+ def fill_transaction_from_address(self, address: str):
+ self.transaction_from_address = address
+
+
+def extract_contract_from_transaction(transaction: Transaction):
+ contract = {
+ "address": transaction.receipt.contract_address,
+ "contract_creator": transaction.from_address,
+ "creation_code": transaction.input,
+ "block_number": transaction.block_number,
+ "block_hash": transaction.block_hash,
+ "block_timestamp": transaction.block_timestamp,
+ "transaction_index": transaction.transaction_index,
+ "transaction_hash": transaction.hash,
+ "transaction_from_address": transaction.from_address,
+ }
+
+ return contract
diff --git a/indexer/domain/contract_internal_transaction.py b/hemera/indexer/domains/contract_internal_transaction.py
similarity index 97%
rename from indexer/domain/contract_internal_transaction.py
rename to hemera/indexer/domains/contract_internal_transaction.py
index 8a4a5da7a..068b7d065 100644
--- a/indexer/domain/contract_internal_transaction.py
+++ b/hemera/indexer/domains/contract_internal_transaction.py
@@ -2,7 +2,7 @@
from eth_utils import to_int
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/current_token_balance.py b/hemera/indexer/domains/current_token_balance.py
similarity index 84%
rename from indexer/domain/current_token_balance.py
rename to hemera/indexer/domains/current_token_balance.py
index 498532064..4c8c0e4b7 100644
--- a/indexer/domain/current_token_balance.py
+++ b/hemera/indexer/domains/current_token_balance.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/log.py b/hemera/indexer/domains/log.py
similarity index 97%
rename from indexer/domain/log.py
rename to hemera/indexer/domains/log.py
index c09e470d7..cddc25577 100644
--- a/indexer/domain/log.py
+++ b/hemera/indexer/domains/log.py
@@ -3,7 +3,7 @@
from eth_utils import to_int, to_normalized_address
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/receipt.py b/hemera/indexer/domains/receipt.py
similarity index 97%
rename from indexer/domain/receipt.py
rename to hemera/indexer/domains/receipt.py
index 6178f66a3..0d88fdd0d 100644
--- a/indexer/domain/receipt.py
+++ b/hemera/indexer/domains/receipt.py
@@ -3,8 +3,8 @@
from eth_utils import to_int, to_normalized_address
-from indexer.domain import Domain
-from indexer.domain.log import Log
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.log import Log
@dataclass
diff --git a/indexer/domain/token.py b/hemera/indexer/domains/token.py
similarity index 52%
rename from indexer/domain/token.py
rename to hemera/indexer/domains/token.py
index bd3141107..77310f274 100644
--- a/indexer/domain/token.py
+++ b/hemera/indexer/domains/token.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
@@ -20,3 +20,18 @@ class UpdateToken(Domain):
address: str
block_number: int
total_supply: Optional[int] = None
+
+
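+# Bookkeeping domains recording balanceOf/totalSupply probe outcomes per token,
+# presumably so contracts that keep failing these calls can be skipped on later runs.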
+@dataclass
+class MarkBalanceToken(Domain):
+ address: str
+ fail_balance_of_count: int
+ succeed_balance_of_count: int
+ no_balance_of: Optional[bool] = True
+
+
+@dataclass
+class MarkTotalSupplyToken(Domain):
+ address: str
+ fail_total_supply_count: int
+ no_total_supply: Optional[bool] = True
diff --git a/indexer/domain/token_balance.py b/hemera/indexer/domains/token_balance.py
similarity index 83%
rename from indexer/domain/token_balance.py
rename to hemera/indexer/domains/token_balance.py
index c73cfa18d..b1dbd1444 100644
--- a/indexer/domain/token_balance.py
+++ b/hemera/indexer/domains/token_balance.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/token_id_infos.py b/hemera/indexer/domains/token_id_infos.py
similarity index 96%
rename from indexer/domain/token_id_infos.py
rename to hemera/indexer/domains/token_id_infos.py
index f2ed4ddbe..33eb616ed 100644
--- a/indexer/domain/token_id_infos.py
+++ b/hemera/indexer/domains/token_id_infos.py
@@ -1,8 +1,8 @@
from dataclasses import dataclass
from typing import Optional
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.domain import Domain
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/token_transfer.py b/hemera/indexer/domains/token_transfer.py
similarity index 96%
rename from indexer/domain/token_transfer.py
rename to hemera/indexer/domains/token_transfer.py
index feac6035f..bb65068b9 100644
--- a/indexer/domain/token_transfer.py
+++ b/hemera/indexer/domains/token_transfer.py
@@ -3,11 +3,11 @@
from dataclasses import dataclass
from typing import List, Optional, Union
-from common.utils.web3_utils import ZERO_ADDRESS
-from enumeration.token_type import TokenType
-from indexer.domain import Domain
-from indexer.domain.log import Log
-from indexer.utils.abi_setting import (
+from hemera.common.enumeration.token_type import TokenType
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.log import Log
+from hemera.indexer.utils.abi_setting import (
ERC20_TRANSFER_EVENT,
ERC1155_BATCH_TRANSFER_EVENT,
ERC1155_SINGLE_TRANSFER_EVENT,
diff --git a/indexer/domain/trace.py b/hemera/indexer/domains/trace.py
similarity index 98%
rename from indexer/domain/trace.py
rename to hemera/indexer/domains/trace.py
index daf921a45..38a5d6b62 100644
--- a/indexer/domain/trace.py
+++ b/hemera/indexer/domains/trace.py
@@ -3,7 +3,7 @@
from eth_utils import to_int
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/domain/transaction.py b/hemera/indexer/domains/transaction.py
similarity index 96%
rename from indexer/domain/transaction.py
rename to hemera/indexer/domains/transaction.py
index 0ea5febba..b5a848a77 100644
--- a/indexer/domain/transaction.py
+++ b/hemera/indexer/domains/transaction.py
@@ -3,8 +3,8 @@
from eth_utils import to_int, to_normalized_address
-from indexer.domain import Domain
-from indexer.domain.receipt import Receipt
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.receipt import Receipt
@dataclass
diff --git a/indexer/jobs/source_job/__init__.py b/hemera/indexer/executors/__init__.py
similarity index 100%
rename from indexer/jobs/source_job/__init__.py
rename to hemera/indexer/executors/__init__.py
diff --git a/indexer/executors/batch_work_executor.py b/hemera/indexer/executors/batch_work_executor.py
similarity index 96%
rename from indexer/executors/batch_work_executor.py
rename to hemera/indexer/executors/batch_work_executor.py
index 4b4ff4bb7..5876dee2e 100644
--- a/indexer/executors/batch_work_executor.py
+++ b/hemera/indexer/executors/batch_work_executor.py
@@ -7,9 +7,9 @@
from requests.exceptions import TooManyRedirects
from web3._utils.threads import Timeout as Web3Timeout
-from common.utils.exception_control import FastShutdownError, RetriableError
-from indexer.executors.bounded_executor import BoundedExecutor
-from indexer.utils.progress_logger import ProgressLogger
+from hemera.common.utils.exception_control import FastShutdownError, RetriableError
+from hemera.indexer.executors.bounded_executor import BoundedExecutor
+from hemera.indexer.utils.progress_logger import ProgressLogger
RETRY_EXCEPTIONS = (
ConnectionError,
diff --git a/indexer/executors/bounded_executor.py b/hemera/indexer/executors/bounded_executor.py
similarity index 100%
rename from indexer/executors/bounded_executor.py
rename to hemera/indexer/executors/bounded_executor.py
diff --git a/indexer/modules/__init__.py b/hemera/indexer/exporters/__init__.py
similarity index 100%
rename from indexer/modules/__init__.py
rename to hemera/indexer/exporters/__init__.py
diff --git a/indexer/exporters/base_exporter.py b/hemera/indexer/exporters/base_exporter.py
similarity index 92%
rename from indexer/exporters/base_exporter.py
rename to hemera/indexer/exporters/base_exporter.py
index 75aa196d7..4826997cb 100644
--- a/indexer/exporters/base_exporter.py
+++ b/hemera/indexer/exporters/base_exporter.py
@@ -1,7 +1,7 @@
import collections
from typing import List
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
class BaseExporter(object):
diff --git a/indexer/exporters/console_item_exporter.py b/hemera/indexer/exporters/console_item_exporter.py
similarity index 84%
rename from indexer/exporters/console_item_exporter.py
rename to hemera/indexer/exporters/console_item_exporter.py
index a1ec45d45..d42b23497 100644
--- a/indexer/exporters/console_item_exporter.py
+++ b/hemera/indexer/exporters/console_item_exporter.py
@@ -1,6 +1,6 @@
import logging
-from indexer.exporters.base_exporter import BaseExporter
+from hemera.indexer.exporters.base_exporter import BaseExporter
logger = logging.getLogger(__name__)
diff --git a/indexer/exporters/csv_file_item_exporter.py b/hemera/indexer/exporters/csv_file_item_exporter.py
similarity index 93%
rename from indexer/exporters/csv_file_item_exporter.py
rename to hemera/indexer/exporters/csv_file_item_exporter.py
index 450afd47f..9ea7c5a7c 100644
--- a/indexer/exporters/csv_file_item_exporter.py
+++ b/hemera/indexer/exporters/csv_file_item_exporter.py
@@ -6,13 +6,13 @@
from dateutil.tz import tzlocal
-from common.utils.file_utils import smart_open
-from indexer.domain import Domain, dataclass_to_dict
-from indexer.exporters.base_exporter import BaseExporter, group_by_item_type
+from hemera.common.utils.file_utils import smart_open
+from hemera.indexer.domains import Domain, dataclass_to_dict
+from hemera.indexer.exporters.base_exporter import BaseExporter, group_by_item_type
logger = logging.getLogger(__name__)
-DEFAULT_BLOCKS_PER_FILE = os.environ.get("DEFAULT_BLOCKS_PER_FILE", 1000)
+DEFAULT_BLOCKS_PER_FILE = int(os.environ.get("DEFAULT_BLOCKS_PER_FILE", "1000"))
class CSVFileItemExporter(BaseExporter):
diff --git a/indexer/exporters/item_exporter.py b/hemera/indexer/exporters/item_exporter.py
similarity index 72%
rename from indexer/exporters/item_exporter.py
rename to hemera/indexer/exporters/item_exporter.py
index f5cc515a1..65453099e 100644
--- a/indexer/exporters/item_exporter.py
+++ b/hemera/indexer/exporters/item_exporter.py
@@ -1,9 +1,9 @@
-from indexer.exporters.base_exporter import BaseExporter
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.exporters.csv_file_item_exporter import CSVFileItemExporter
-from indexer.exporters.hemera_address_postgres_item_exporter import HemeraAddressPostgresItemExporter
-from indexer.exporters.json_file_item_exporter import JSONFileItemExporter
-from indexer.exporters.postgres_item_exporter import PostgresItemExporter
+from hemera.indexer.exporters.base_exporter import BaseExporter
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.exporters.csv_file_item_exporter import CSVFileItemExporter
+from hemera.indexer.exporters.json_file_item_exporter import JSONFileItemExporter
+from hemera.indexer.exporters.kafka_exporter import KafkaItemExporter
+from hemera.indexer.exporters.postgres_item_exporter import PostgresItemExporter
def create_item_exporters(outputs, config):
@@ -22,8 +22,11 @@ def create_item_exporter(output, config):
item_exporter = JSONFileItemExporter(output, config)
elif item_exporter_type == ItemExporterType.CSVFILE:
item_exporter = CSVFileItemExporter(output, config)
- elif item_exporter_type == ItemExporterType.HEMERA_ADDRESS_POSTGRES:
- item_exporter = HemeraAddressPostgresItemExporter(output, config["chain_id"])
+ elif item_exporter_type == ItemExporterType.KAFKA:
+ item_exporter = KafkaItemExporter(output)
+    # TODO: Reimplement HemeraAddressPostgresItemExporter with a dynamic import.
+ # elif item_exporter_type == ItemExporterType.HEMERA_ADDRESS_POSTGRES:
+ # item_exporter = HemeraAddressPostgresItemExporter(output, config["chain_id"])
elif item_exporter_type == ItemExporterType.VOID:
item_exporter = BaseExporter()
else:
@@ -52,6 +55,8 @@ def determine_item_exporter_type(output):
return ItemExporterType.JSONFILE
elif output is not None and output.startswith("csvfile://"):
return ItemExporterType.CSVFILE
+ elif output is not None and output.startswith("kafka://"):
+ return ItemExporterType.KAFKA
elif output is not None and output == "void":
return ItemExporterType.VOID
elif output is None or output == "console":
@@ -68,6 +73,7 @@ class ItemExporterType:
CSVFILE = "csvfile"
CONSOLE = "console"
UNKNOWN = "unknown"
+ KAFKA = "kafka"
HEMERA_ADDRESS_POSTGRES = "hemera_address_postgres"
diff --git a/indexer/exporters/json_file_item_exporter.py b/hemera/indexer/exporters/json_file_item_exporter.py
similarity index 93%
rename from indexer/exporters/json_file_item_exporter.py
rename to hemera/indexer/exporters/json_file_item_exporter.py
index f907551d7..78ee46959 100644
--- a/indexer/exporters/json_file_item_exporter.py
+++ b/hemera/indexer/exporters/json_file_item_exporter.py
@@ -6,13 +6,13 @@
from dateutil.tz import tzlocal
-from common.utils.file_utils import smart_open
-from indexer.domain import Domain, dataclass_to_dict
-from indexer.exporters.base_exporter import BaseExporter, group_by_item_type
+from hemera.common.utils.file_utils import smart_open
+from hemera.indexer.domains import Domain, dataclass_to_dict
+from hemera.indexer.exporters.base_exporter import BaseExporter, group_by_item_type
logger = logging.getLogger(__name__)
-DEFAULT_BLOCKS_PER_FILE = os.environ.get("DEFAULT_BLOCKS_PER_FILE", 1000)
+DEFAULT_BLOCKS_PER_FILE = int(os.environ.get("DEFAULT_BLOCKS_PER_FILE", "1000"))
class JSONFileItemExporter(BaseExporter):
diff --git a/hemera/indexer/exporters/kafka_exporter.py b/hemera/indexer/exporters/kafka_exporter.py
new file mode 100644
index 000000000..576cac18d
--- /dev/null
+++ b/hemera/indexer/exporters/kafka_exporter.py
@@ -0,0 +1,187 @@
+import json
+import logging
+import os
+from copy import deepcopy
+from dataclasses import asdict
+from datetime import datetime, timezone
+from urllib.parse import urlparse
+
+from kafka import KafkaProducer
+from kafka.errors import KafkaError
+
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.exporters.base_exporter import BaseExporter
+from hemera_udf.token_holder_metrics.domains.metrics import TokenHolderMetricsCurrentD, TokenHolderMetricsHistoryD
+from hemera_udf.token_price.domains import DexBlockTokenPrice
+from hemera_udf.uniswap_v2 import UniswapV2SwapEvent
+from hemera_udf.uniswap_v3 import UniswapV3SwapEvent
+
+logger = logging.getLogger(__name__)
+
+chain_name = os.environ.get("CHAIN_NAME", "default")
+
+
+class KafkaItemExporter(BaseExporter):
+ def __init__(self, output, max_retries=5, ack_mode="all", timeout=30):
+ """
+ Initialize Kafka exporter with reliable delivery settings.
+
+ Args:
+ output: Kafka connection URL
+ max_retries: Number of retry attempts for message delivery
+ ack_mode: Acknowledgment mode ("all" for strongest guarantee)
+ timeout: Timeout in seconds for message delivery confirmation
+ """
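+        # Expected URL shape (hypothetical values): kafka+ssl://user:secret@broker.example.com:9093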
+ self.connection_url = self.get_connection_url(output)
+ self.max_retries = max_retries
+ self.timeout = timeout
+ self.producer = None
+ self._create_producer(ack_mode)
+
+ def _create_producer(self, ack_mode):
+ """Create Kafka producer with reliability settings."""
+ try:
+ self.producer = KafkaProducer(
+ bootstrap_servers=self.connection_url,
+ security_protocol="SASL_SSL" if self.protocol == "kafka+ssl" else "SASL_PLAINTEXT",
+ sasl_mechanism="PLAIN",
+ sasl_plain_username=self.username,
+ sasl_plain_password=self.password,
+ ssl_cafile=None,
+ # Reliability settings
+ acks=ack_mode, # Wait for all replicas to acknowledge
+ retries=self.max_retries, # Number of retries
+ retry_backoff_ms=500, # Backoff time between retries
+ )
+ logger.info("Kafka producer initialized successfully")
+ except Exception as e:
+ logger.error(f"Failed to initialize Kafka producer: {e}")
+ raise
+
+ def get_connection_url(self, output):
+ """Parse and validate Kafka connection URL."""
+ try:
+ parsed_url = urlparse(output)
+ if parsed_url.scheme not in ["kafka", "kafka+ssl"]:
+ raise ValueError('Invalid scheme in kafka URL. Use "kafka" or "kafka+ssl".')
+
+            connection_url = f"{parsed_url.hostname}:{parsed_url.port}"
+ self.username = parsed_url.username
+ self.password = parsed_url.password
+ self.protocol = parsed_url.scheme
+ return connection_url
+ except Exception as e:
+ raise ValueError(f"Invalid kafka output param: {output}. Error: {e}")
+
+ def open(self):
+ pass
+
+ def export_items(self, items, **kwargs):
+ """Export multiple items to Kafka with delivery guarantees."""
+ success_count = 0
+ fail_count = 0
+
+ for item in items:
+ try:
+ result = self.export_item(item)
+ if result:
+ success_count += 1
+ else:
+ fail_count += 1
+ except Exception as e:
+ logger.error(f"Failed to export item: {e}")
+ fail_count += 1
+
+ logger.info(f"Exported {success_count} items successfully, {fail_count} failed")
+ return success_count, fail_count
+
+ def export_item(self, item: Domain, **kwargs):
+ """
+ Export a single item to Kafka with guaranteed delivery.
+
+ Returns:
+ bool: True if successful, False otherwise
+ """
+ if not self.producer:
+ logger.error("Kafka producer not initialized")
+ return False
+
+ item = self.domain_mapping(item)
+ if item is None:
+ return False
+
+ try:
+ # Prepare the data
+ data = {key: value for key, value in asdict(item).items() if value is not None}
+ utc_now = datetime.now(timezone.utc)
+ utc_timestamp = int(utc_now.timestamp())
+ data["update_time"] = utc_timestamp
+ encoded_data = json.dumps(data).encode("utf-8")
+
+ # Send the message and wait for confirmation
+ topic = item.type()
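+            # Topics are prefixed with CHAIN_NAME; "base" is special-cased to keep its unprefixed topic names.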
+ if chain_name != "base":
+ topic = f"{chain_name}_{topic}"
+
+ future = self.producer.send(topic, value=encoded_data)
+
+ # Block until the message is sent (or timeout)
+ record_metadata = future.get(timeout=self.timeout)
+
+ logger.debug(
+ f"Message sent successfully to {topic} "
+ f"[partition={record_metadata.partition}, offset={record_metadata.offset}]"
+ )
+ return True
+
+ except KafkaError as ke:
+ logger.error(f"Kafka error while exporting item: {ke}")
+ return False
+ except Exception as e:
+ logger.error(f"Unexpected error while exporting item: {e}")
+ return False
+
+ def close(self):
+ pass
+
+ def domain_mapping(self, item):
+ """Map domain objects for Kafka compatibility."""
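+        # Only whitelisted domain types are forwarded to Kafka; unmapped types return None and are skipped.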
+ data = deepcopy(item)
+
+ if isinstance(data, (TokenBalance, CurrentTokenBalance)):
+ if data.token_id is None or data.token_id < 0:
+ data.token_id = 0
+ return data
+ if isinstance(data, DexBlockTokenPrice):
+ data.token_symbol = ""
+ return data
+        if isinstance(data, (TokenHolderMetricsHistoryD, TokenHolderMetricsCurrentD)):
+            # Normalize balance fields to int; values may arrive as Decimal, which json.dumps rejects.
+            for field in (
+                "current_balance",
+                "max_balance",
+                "total_buy_amount",
+                "total_sell_amount",
+                "swap_buy_amount",
+                "swap_sell_amount",
+            ):
+                value = getattr(data, field)
+                if value:
+                    setattr(data, field, int(value))
+            return data
+        if isinstance(data, (UniswapV2SwapEvent, UniswapV3SwapEvent, ERC20TokenTransfer)):
+            # TokenHolderMetrics types already returned above.
+            return data
+
+ return None
diff --git a/indexer/exporters/postgres_item_exporter.py b/hemera/indexer/exporters/postgres_item_exporter.py
similarity index 89%
rename from indexer/exporters/postgres_item_exporter.py
rename to hemera/indexer/exporters/postgres_item_exporter.py
index c4cd423d2..a8c9b7d94 100644
--- a/indexer/exporters/postgres_item_exporter.py
+++ b/hemera/indexer/exporters/postgres_item_exporter.py
@@ -1,17 +1,17 @@
import logging
+import os
from typing import Type
from psycopg2.extras import execute_values
from tqdm import tqdm
-from common.converter.pg_converter import domain_model_mapping
-from common.models import HemeraModel
-from common.services.postgresql_service import PostgreSQLService
-from indexer.exporters.base_exporter import BaseExporter, group_by_item_type
+from hemera.common.models import HemeraModel
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.indexer.exporters.base_exporter import BaseExporter, group_by_item_type
logger = logging.getLogger(__name__)
-COMMIT_BATCH_SIZE = 1000
+COMMIT_BATCH_SIZE = int(os.environ.get("COMMIT_BATCH_SIZE", "1000"))
class TqdmExtraFormat(tqdm):
@@ -32,6 +32,9 @@ def __init__(self, **service):
self.postgres_url = service["postgres_url"]
self.db_version = service.get("db_version")
self.init_schema = service.get("init_schema")
+ from hemera.common.converter.pg_converter import domain_model_mapping
+
+ self._domain_model_mapping = domain_model_mapping
# self.service = service
def export_items(self, items, **kwargs):
@@ -62,7 +65,10 @@ def export_items(self, items, **kwargs):
item_group = items_grouped_by_type.get(item_type)
if item_group:
- pg_config = domain_model_mapping[item_type]
+ if item_type not in self._domain_model_mapping:
+ continue
+
+ pg_config = self._domain_model_mapping[item_type]
table = pg_config["table"]
do_update = pg_config["conflict_do_update"]
update_strategy = pg_config["update_strategy"]
diff --git a/hemera/indexer/jobs/__init__.py b/hemera/indexer/jobs/__init__.py
new file mode 100644
index 000000000..afb07829b
--- /dev/null
+++ b/hemera/indexer/jobs/__init__.py
@@ -0,0 +1,27 @@
+__all__ = [
+ "CSVSourceJob",
+ "PGSourceJob",
+ "ExportBlocksJob",
+ "ExportTransactionsAndLogsJob",
+ "ExportTokensAndTransfersJob",
+ "ExportTokenIdInfosJob",
+ "ExportTokenBalancesJob",
+ "ExportTracesJob",
+ "ExportContractsJob",
+ "ExportCoinBalancesJob",
+ "FilterTransactionDataJob",
+ "ExportContractsFromTransactionJob",
+]
+
+from hemera.indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.jobs.export_blocks_job import ExportBlocksJob
+from hemera.indexer.jobs.export_coin_balances_job import ExportCoinBalancesJob
+from hemera.indexer.jobs.export_contracts_from_transaction_job import ExportContractsFromTransactionJob
+from hemera.indexer.jobs.export_contracts_job import ExportContractsJob
+from hemera.indexer.jobs.export_token_balances_job import ExportTokenBalancesJob
+from hemera.indexer.jobs.export_token_id_infos_job import ExportTokenIdInfosJob
+from hemera.indexer.jobs.export_tokens_and_transfers_job import ExportTokensAndTransfersJob
+from hemera.indexer.jobs.export_traces_job import ExportTracesJob
+from hemera.indexer.jobs.export_transactions_and_logs_job import ExportTransactionsAndLogsJob
+from hemera.indexer.jobs.source_job.csv_source_job import CSVSourceJob
+from hemera.indexer.jobs.source_job.pg_source_job import PGSourceJob
diff --git a/indexer/jobs/base_job.py b/hemera/indexer/jobs/base_job.py
similarity index 85%
rename from indexer/jobs/base_job.py
rename to hemera/indexer/jobs/base_job.py
index 75a7aac40..3b4359f05 100644
--- a/indexer/jobs/base_job.py
+++ b/hemera/indexer/jobs/base_job.py
@@ -7,12 +7,12 @@
from deprecated import deprecated
from web3 import Web3
-from common.converter.pg_converter import domain_model_mapping
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import to_snake_case
-from indexer.domain import Domain
-from indexer.domain.transaction import Transaction
-from indexer.utils.reorg import should_reorg
+from hemera.common.utils.exception_control import FastShutdownError, RetriableError
+from hemera.common.utils.format_utils import to_snake_case
+from hemera.indexer.domains import Domain
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.utils.buffer_service import BufferService
+from hemera.indexer.utils.reorg import should_reorg
T = TypeVar("T")
@@ -31,29 +31,47 @@ def check_collect_type(self, cls):
)
def collect_item(self, key: str, data: Domain):
+ if data is None:
+ return
+
self.check_collect_type(type(data))
with self.job._data_buff_lock[key]:
self.job._data_buff[key].append(data)
def collect_items(self, key, datas: List[Domain]):
+ if datas is None or len(datas) == 0:
+ return
+
self.check_collect_type(type(datas[0]))
with self.job._data_buff_lock[key]:
self.job._data_buff[key].extend(datas)
def collect_domain(self, domain: Domain):
+ if domain is None:
+ return
+
self.check_collect_type(type(domain))
self.collect(domain)
def collect_domains(self, domains: List[Domain]):
+ if domains is None or len(domains) == 0:
+ return
+
self.check_collect_type(type(domains[0]))
self.collects(domains)
def collect(self, domain: Domain):
+ if domain is None:
+ return
+
self.check_collect_type(type(domain))
with self.job._data_buff_lock[domain.type()]:
self.job._data_buff[domain.type()].append(domain)
def collects(self, domains: List[Domain]):
+ if domains is None or len(domains) == 0:
+ return
+
self.check_collect_type(type(domains[0]))
with self.job._data_buff_lock[domains[0].type()]:
self.job._data_buff[domains[0].type()].extend(domains)
@@ -89,7 +107,7 @@ def get_subclasses(cls):
class BaseJob(metaclass=BaseJobMeta):
- _data_buff = defaultdict(list)
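+    # The scheduler may swap this class-level buffer for a shared BufferService
+    # (see JobScheduler.instantiate_jobs), so it is only assumed to be dict-like here.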
+ _data_buff: Union[dict, BufferService] = defaultdict(list)
_data_buff_lock = defaultdict(threading.Lock)
tokens = None
@@ -115,7 +133,6 @@ def init_token_cache(cls, _token=None):
def __init__(self, **kwargs):
self._required_output_types = kwargs["required_output_types"]
- self._item_exporters = kwargs["item_exporters"]
self._batch_web3_provider = kwargs["batch_web3_provider"]
self._web3 = Web3(Web3.HTTPProvider(self._batch_web3_provider.endpoint_uri))
self.logger = logging.getLogger(self.__class__.__name__)
@@ -127,12 +144,16 @@ def __init__(self, **kwargs):
self._should_reorg = False
self._should_reorg_type = set()
self._service = kwargs["config"].get("db_service", None)
+ self.collector = Collector(self, self.output_types)
job_name_snake = to_snake_case(self.job_name)
self.user_defined_config = kwargs["config"][job_name_snake] if kwargs["config"].get(job_name_snake) else {}
def run(self, **kwargs):
try:
+ start_block = kwargs["start_block"]
+ end_block = kwargs["end_block"]
+
self._start(**kwargs)
if self.able_to_reorg and self._reorg:
@@ -149,16 +170,29 @@ def run(self, **kwargs):
self._collect(**kwargs)
self._process(**kwargs)
- if not self._reorg:
- self._export()
+ if not self._reorg or not issubclass(self.__class__, BaseSourceJob):
+ if (
+ type(self._data_buff) is BufferService
+ and not self._data_buff.is_shutdown()
+ and not self._data_buff.check_and_flush(
+ start_block=start_block,
+ end_block=end_block,
+ job_name=self.job_name,
+ output_types=[output.type() for output in self.output_types],
+ )
+ ):
+                    raise RetriableError(f"Job {self.job_name} failed to flush its data buffer.")
finally:
self._end()
def _start(self, **kwargs):
- pass
+ for dataclass in self.output_types:
+ self._data_buff[dataclass.type()].clear()
def _pre_reorg(self, **kwargs):
+ from hemera.common.converter.pg_converter import domain_model_mapping
+
if self._service is None:
raise FastShutdownError("PG Service is not set")
@@ -181,14 +215,14 @@ def _end(self):
self._data_buff.pop(output.type())
# @deprecated
- # This function has been marked as deprecated in 0.6.0, and will be removed in 0.8.0.
+ # This function has been marked as deprecated in 0.6.0, and will be removed in 1.1.0.
# Please move your data process logic into _udf instead.
@deprecated
def _collect(self, **kwargs):
pass
# @deprecated
- # This function has been marked as deprecated in 0.6.0, and will be removed in 0.8.0.
+ # This function has been marked as deprecated in 0.6.0, and will be removed in 1.1.0.
# Please move your data process batch logic into custom define function instead.
@deprecated
def _collect_batch(self, iterator):
@@ -211,8 +245,9 @@ def _collect_domains(self, domains: List[Domain]):
self._collect_domain(domain)
def _update_domains(self, domains: List[Domain]):
- key = domains[0].type()
- self._data_buff[key] = domains
+ if not domains:
+ return
+ self._data_buff[domains[0].type()] = domains
def _get_domain(self, domain):
return self._data_buff[domain.type()] if domain.type() in self._data_buff else []
@@ -224,7 +259,7 @@ def _get_domains(self, domains: list[Domain]):
return res
# @deprecated
- # This function has been marked as deprecated in 0.6.0, and will be removed in 0.8.0.
+ # This function has been marked as deprecated in 0.6.0, and will be removed in 1.1.0.
# Please move your data process logic into _udf instead.
@deprecated
def _process(self, **kwargs):
@@ -237,29 +272,17 @@ def _build_udf_parameter(self):
if param == "output":
continue
args_type = get_args(param_type)[0]
- if args_type.type() in self._data_buff:
+ if args_type.type() in self._data_buff.keys():
parameters[param] = self._data_buff[args_type.type()]
else:
parameters[param] = []
- parameters["output"] = Collector(self, self.output_types)
+ parameters["output"] = self.collector
return parameters
def _udf(self, **kwargs):
pass
- def _export(self):
- items = []
-
- for output_type in self.output_types:
- if output_type in self._required_output_types:
- items.extend(self._data_buff[output_type.type()])
-
- for item_exporter in self._item_exporters:
- item_exporter.open()
- item_exporter.export_items(items, job_name=self.job_name)
- item_exporter.close()
-
def get_buff(self):
return self._data_buff
diff --git a/indexer/jobs/check_block_consensus_job.py b/hemera/indexer/jobs/check_block_consensus_job.py
similarity index 92%
rename from indexer/jobs/check_block_consensus_job.py
rename to hemera/indexer/jobs/check_block_consensus_job.py
index 3a0ee0e34..563ec3ff1 100644
--- a/indexer/jobs/check_block_consensus_job.py
+++ b/hemera/indexer/jobs/check_block_consensus_job.py
@@ -4,11 +4,11 @@
from sqlalchemy import and_
-from common.models.blocks import Blocks
-from common.utils.format_utils import as_dict
-from indexer.domain import dict_to_dataclass
-from indexer.domain.block import Block
-from indexer.jobs.base_job import BaseJob
+from hemera.common.models.blocks import Blocks
+from hemera.common.utils.format_utils import as_dict
+from hemera.indexer.domains import dict_to_dataclass
+from hemera.indexer.domains.block import Block
+from hemera.indexer.jobs.base_job import BaseJob
logger = logging.getLogger(__name__)
diff --git a/indexer/jobs/export_blocks_job.py b/hemera/indexer/jobs/export_blocks_job.py
similarity index 82%
rename from indexer/jobs/export_blocks_job.py
rename to hemera/indexer/jobs/export_blocks_job.py
index e94218ef1..6b03b7639 100644
--- a/indexer/jobs/export_blocks_job.py
+++ b/hemera/indexer/jobs/export_blocks_job.py
@@ -2,23 +2,22 @@
import orjson
-from common.utils.exception_control import FastShutdownError
-from indexer.domain.block import Block
-from indexer.domain.block_ts_mapper import BlockTsMapper
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob
-from indexer.specification.specification import (
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.block_ts_mapper import BlockTsMapper
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.specification.specification import (
AlwaysFalseSpecification,
AlwaysTrueSpecification,
TransactionFilterByLogs,
TransactionFilterByTransactionInfo,
TransactionHashSpecification,
)
-from indexer.utils.collection_utils import flatten
-from indexer.utils.json_rpc_requests import generate_get_block_by_number_json_rpc
-from indexer.utils.reorg import set_reorg_sign
-from indexer.utils.rpc_utils import rpc_response_batch_to_results
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group, flatten
+from hemera.indexer.utils.json_rpc_requests import generate_get_block_by_number_json_rpc
+from hemera.indexer.utils.reorg import set_reorg_sign
+from hemera.indexer.utils.rpc_utils import rpc_response_batch_to_results
logger = logging.getLogger(__name__)
@@ -94,13 +93,16 @@ def _collect_batch(self, block_number_batch):
for block_rpc_dict in results:
block_entity = Block.from_rpc(block_rpc_dict)
self._collect_item(Block.type(), block_entity)
+
+ satisfied_transactions = []
for transaction_entity in block_entity.transactions:
if self._specification.is_satisfied_by(transaction_entity):
- self._collect_item(Transaction.type(), transaction_entity)
+ satisfied_transactions.append(transaction_entity)
+ block_entity.transactions = satisfied_transactions
def _process(self, **kwargs):
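+        # De-duplicate blocks by hash before sorting; transactions were already filtered per block in _collect_batch.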
+ self._data_buff[Block.type()] = distinct_collections_by_group(self._data_buff[Block.type()], ["hash"])
self._data_buff[Block.type()].sort(key=lambda x: x.number)
- self._data_buff[Transaction.type()].sort(key=lambda x: (x.block_number, x.transaction_index))
ts_dict = {}
for block in self._data_buff[Block.type()]:
diff --git a/indexer/jobs/export_coin_balances_job.py b/hemera/indexer/jobs/export_coin_balances_job.py
similarity index 86%
rename from indexer/jobs/export_coin_balances_job.py
rename to hemera/indexer/jobs/export_coin_balances_job.py
index 8380cc819..5077ef6b6 100644
--- a/indexer/jobs/export_coin_balances_job.py
+++ b/hemera/indexer/jobs/export_coin_balances_job.py
@@ -5,17 +5,17 @@
from eth_utils import to_int
-from common.utils.exception_control import RPCNotReachable
-from enumeration.record_level import RecordLevel
-from indexer.domain.block import Block
-from indexer.domain.coin_balance import CoinBalance
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.json_rpc_requests import generate_get_balance_json_rpc
-from indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+from hemera.common.enumeration.record_level import RecordLevel
+from hemera.common.utils.exception_control import RPCNotReachable
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.coin_balance import CoinBalance
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.json_rpc_requests import generate_get_balance_json_rpc
+from hemera.indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
logger = logging.getLogger(__name__)
exception_recorder = ExceptionRecorder()
diff --git a/hemera/indexer/jobs/export_contracts_from_transaction_job.py b/hemera/indexer/jobs/export_contracts_from_transaction_job.py
new file mode 100644
index 000000000..e4d650144
--- /dev/null
+++ b/hemera/indexer/jobs/export_contracts_from_transaction_job.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2025/1/3 14:11
+# @Author ideal93
+# @File export_contracts_from_transaction_job.py
+# @Brief
+
+import json
+import logging
+from typing import List, Optional, Union
+
+from hemera.common.enumeration.record_level import RecordLevel
+from hemera.common.utils.abi_code_utils import decode_data, encode_data
+from hemera.common.utils.exception_control import HemeraBaseException
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.contract import Contract, ContractFromTransaction, extract_contract_from_transaction
+from hemera.indexer.domains.trace import Trace
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.utils.abi_setting import TOKEN_NAME_FUNCTION
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.json_rpc_requests import generate_eth_call_json_rpc
+from hemera.indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+
+logger = logging.getLogger(__name__)
+exception_recorder = ExceptionRecorder()
+
+
+# Exports contracts created directly by transactions (receipt.contract_address), without requiring trace data.
+class ExportContractsFromTransactionJob(BaseExportJob):
+ dependency_types = [Transaction]
+ output_types = [ContractFromTransaction]
+ able_to_reorg = True
+
+ def get_code(self, address, block_number: Union[str, int, None]) -> Optional[str]:
+ if block_number is not None:
+ if isinstance(block_number, int):
+ block_number = hex(block_number)
+ else:
+ block_number = "latest"
+ try:
+ address = self._web3.to_checksum_address(address)
+ code = self._web3.eth.get_code(address, block_number)
+ return code.hex()
+        except Exception as e:
+            logger.warning(f"Failed to fetch deployed code for contract {address}: {e}")
+            return None
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ self._batch_work_executor = BatchWorkExecutor(
+ kwargs["batch_size"],
+ kwargs["max_workers"],
+ job_name=self.__class__.__name__,
+ )
+ self._is_batch = kwargs["batch_size"] > 1
+
+ def _collect(self, **kwargs):
+ contracts = self.build_contracts(self._data_buff[Transaction.type()])
+
+ self._batch_work_executor.execute(contracts, self._collect_batch, total_items=len(contracts))
+ self._batch_work_executor.wait()
+
+ def _collect_batch(self, contracts):
+ contracts = contract_info_rpc_requests(self._batch_web3_provider.make_request, contracts, self._is_batch)
+
+ for contract in contracts:
+ self._collect_item(ContractFromTransaction.type(), ContractFromTransaction(contract))
+
+ def _process(self, **kwargs):
+ self._data_buff[ContractFromTransaction.type()].sort(
+ key=lambda x: (x.block_number, x.transaction_index, x.address)
+ )
+
+ def build_contracts(self, transactions: List[Transaction]):
+ contracts = []
+ for transaction in transactions:
+ if transaction.receipt.contract_address is not None and transaction.receipt.status == 1:
+ contract = extract_contract_from_transaction(transaction)
+ contract["deployed_code"] = self.get_code(contract["address"], transaction.block_number)
+
+ contract["param_to"] = contract["address"]
+
+ try:
+ contract["param_data"] = encode_data(
+ TOKEN_NAME_FUNCTION.get_abi(), [], TOKEN_NAME_FUNCTION.get_signature()
+ )
+ except Exception as e:
+ logger.warning(
+ f"Encoding contract api parameter failed. "
+ f"contract address: {contract['address']}. "
+ f"fn: name. "
+ f"exception: {e}. "
+ )
+ contract["param_data"] = "0x"
+
+ contract["param_number"] = hex(contract["block_number"])
+ contracts.append(contract)
+
+ return contracts
+
+
+def contract_info_rpc_requests(make_requests, contracts, is_batch):
+ for idx, contract in enumerate(contracts):
+ contract["request_id"] = idx
+
+ contract_name_rpc = list(generate_eth_call_json_rpc(contracts))
+
+ if is_batch:
+ response = make_requests(params=json.dumps(contract_name_rpc))
+ else:
+ response = [make_requests(params=json.dumps(contract_name_rpc[0]))]
+
+ for data in list(zip_rpc_response(contracts, response)):
+ contract = data[0]
+ try:
+ result = rpc_response_to_result(data[1])
+ except HemeraBaseException as e:
+ result = None
+ logger.warning(
+                f"eth_call for contract name failed. "
+                f"contract address: {contract['address']}. "
+                f"rpc response: {data[1]}. "
+ f"exception: {e}"
+ )
+ exception_recorder.log(
+ block_number=data[0]["block_number"],
+ dataclass=Contract.type(),
+ message_type=e.__class__.__name__,
+ message=str(e),
+ exception_env=data[1],
+ level=RecordLevel.WARN,
+ )
+
+        info = result[2:] if result is not None else None
+        if info is None:
+            # no result to decode; record the contract without a name
+            contract["name"] = None
+            continue
+
+ try:
+ contract["name"] = decode_data(["string"], bytes.fromhex(info))[0].replace("\u0000", "")
+ except Exception as e:
+ logger.warning(
+ f"Decoding contract name failed. "
+ f"contract address: {contract['address']}. "
+ f"rpc response: {result}. "
+ f"exception: {e}"
+ )
+ exception_recorder.log(
+ block_number=data[0]["block_number"],
+ dataclass=Contract.type(),
+ message_type="DecodeNameFail",
+ message=str(e),
+ exception_env=contract,
+ level=RecordLevel.WARN,
+ )
+ contract["name"] = None
+
+ return contracts
diff --git a/indexer/jobs/export_contracts_job.py b/hemera/indexer/jobs/export_contracts_job.py
similarity index 85%
rename from indexer/jobs/export_contracts_job.py
rename to hemera/indexer/jobs/export_contracts_job.py
index 6d63577a9..db261983a 100644
--- a/indexer/jobs/export_contracts_job.py
+++ b/hemera/indexer/jobs/export_contracts_job.py
@@ -2,18 +2,18 @@
import logging
from typing import List
-from common.utils.abi_code_utils import decode_data, encode_data
-from common.utils.exception_control import HemeraBaseException
-from enumeration.record_level import RecordLevel
-from indexer.domain.block import Block
-from indexer.domain.contract import Contract, extract_contract_from_trace
-from indexer.domain.trace import Trace
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob
-from indexer.utils.abi_setting import TOKEN_NAME_FUNCTION
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.json_rpc_requests import generate_eth_call_json_rpc
-from indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+from hemera.common.enumeration.record_level import RecordLevel
+from hemera.common.utils.abi_code_utils import decode_data, encode_data
+from hemera.common.utils.exception_control import HemeraBaseException
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.contract import Contract, extract_contract_from_trace
+from hemera.indexer.domains.trace import Trace
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.utils.abi_setting import TOKEN_NAME_FUNCTION
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.json_rpc_requests import generate_eth_call_json_rpc
+from hemera.indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
logger = logging.getLogger(__name__)
exception_recorder = ExceptionRecorder()
diff --git a/indexer/jobs/export_reorg_job.py b/hemera/indexer/jobs/export_reorg_job.py
similarity index 82%
rename from indexer/jobs/export_reorg_job.py
rename to hemera/indexer/jobs/export_reorg_job.py
index 114c5a14f..99563023b 100644
--- a/indexer/jobs/export_reorg_job.py
+++ b/hemera/indexer/jobs/export_reorg_job.py
@@ -2,9 +2,8 @@
from psycopg2.extras import execute_values
-from common.converter.pg_converter import domain_model_mapping
-from indexer.exporters.postgres_item_exporter import sql_insert_statement
-from indexer.jobs.base_job import BaseJob
+from hemera.indexer.exporters.postgres_item_exporter import sql_insert_statement
+from hemera.indexer.jobs.base_job import BaseJob
logger = logging.getLogger(__name__)
@@ -12,8 +11,11 @@
class ExportReorgJob(BaseJob):
def __init__(self, **kwargs):
+ from hemera.common.converter.pg_converter import domain_model_mapping
+
super().__init__(**kwargs)
self._should_reorg = True
+ self._domain_model_mapping = domain_model_mapping
def _process(self, **kwargs):
block_number = int(kwargs["start_block"])
@@ -25,10 +27,10 @@ def _process(self, **kwargs):
if len(self._data_buff[key]) > 0:
items = self._data_buff[key]
domain = type(items[0])
- if domain.__name__ not in domain_model_mapping:
+ if domain.__name__ not in self._domain_model_mapping:
continue
- pg_config = domain_model_mapping[domain.__name__]
+ pg_config = self._domain_model_mapping[domain.__name__]
table = pg_config["table"]
do_update = pg_config["conflict_do_update"]
diff --git a/indexer/jobs/export_token_balances_job.py b/hemera/indexer/jobs/export_token_balances_job.py
similarity index 51%
rename from indexer/jobs/export_token_balances_job.py
rename to hemera/indexer/jobs/export_token_balances_job.py
index 03a9833b6..609355a85 100644
--- a/indexer/jobs/export_token_balances_job.py
+++ b/hemera/indexer/jobs/export_token_balances_job.py
@@ -5,19 +5,20 @@
from eth_utils import to_hex
from hexbytes import HexBytes
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.domain import dict_to_dataclass
-from indexer.domain.current_token_balance import CurrentTokenBalance
-from indexer.domain.token_balance import TokenBalance
-from indexer.domain.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob
-from indexer.utils.abi import pad_address, uint256_to_bytes
-from indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION, ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION
-from indexer.utils.collection_utils import distinct_collections_by_group
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.multicall_hemera.util import calculate_execution_time
-from indexer.utils.token_fetcher import TokenFetcher
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains import dict_to_dataclass
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.token import MarkBalanceToken
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.utils.abi import pad_address, uint256_to_bytes
+from hemera.indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION, ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.multicall_hemera.util import calculate_execution_time
+from hemera.indexer.utils.token_fetcher import TokenFetcher
logger = logging.getLogger(__name__)
exception_recorder = ExceptionRecorder()
@@ -33,10 +34,13 @@ class TokenBalanceParam:
block_timestamp: int
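+# A token is flagged as lacking balanceOf once failed calls exceed this threshold with zero successes.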
+FAILURE_THRESHOLD = 100
+
+
# Exports token balance
class ExportTokenBalancesJob(BaseExportJob):
dependency_types = [ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer]
- output_types = [TokenBalance, CurrentTokenBalance]
+ output_types = [TokenBalance, CurrentTokenBalance, MarkBalanceToken]
able_to_reorg = True
def __init__(self, **kwargs):
@@ -54,14 +58,36 @@ def __init__(self, **kwargs):
@calculate_execution_time
def _collect(self, **kwargs):
token_transfers = self._collect_all_token_transfers()
- parameters = extract_token_parameters(token_transfers)
+ parameters = self.extract_token_parameters(token_transfers)
self._collect_batch(parameters)
@calculate_execution_time
def _collect_batch(self, parameters):
token_balances = self.token_fetcher.fetch_token_balance(parameters)
- results = [dict_to_dataclass(t, TokenBalance) for t in token_balances]
+ results = []
+ tokens_set = set()
+ for tb in token_balances:
+ if tb["token_address"] in self.tokens:
+ key = "fail_balance_of_count" if tb["balance"] is None else "succeed_balance_of_count"
+ self.tokens[tb["token_address"]][key] += 1
+ tokens_set.add(tb["token_address"])
+ results.append(dict_to_dataclass(tb, TokenBalance))
self._collect_items(TokenBalance.type(), results)
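+        # Flag tokens whose balanceOf keeps failing so extract_token_parameters skips them afterwards.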
+ for tk in tokens_set:
+ if (
+ self.tokens[tk]["fail_balance_of_count"] > FAILURE_THRESHOLD
+ and self.tokens[tk]["succeed_balance_of_count"] <= 0
+ ):
+ self.tokens[tk]["no_balance_of"] = True
+ self._collect_item(
+ MarkBalanceToken.type(),
+ MarkBalanceToken(
+ address=tk,
+ no_balance_of=self.tokens[tk]["no_balance_of"],
+ fail_balance_of_count=self.tokens[tk]["fail_balance_of_count"],
+ succeed_balance_of_count=self.tokens[tk]["succeed_balance_of_count"],
+ ),
+ )
def _process(self, **kwargs):
if TokenBalance.type() in self._data_buff:
@@ -88,17 +114,62 @@ def _process(self, **kwargs):
@calculate_execution_time
def _collect_all_token_transfers(self):
token_transfers = []
+ erc20_tokens = set()
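+        # Record ERC20 token addresses so ERC721 transfers from the same contract are filtered out below.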
if ERC20TokenTransfer.type() in self._data_buff:
token_transfers += self._data_buff[ERC20TokenTransfer.type()]
-
+ erc20_tokens.update([transfer.token_address for transfer in self._data_buff[ERC20TokenTransfer.type()]])
if ERC721TokenTransfer.type() in self._data_buff:
- token_transfers += self._data_buff[ERC721TokenTransfer.type()]
+ for erc721_token_transfer in self._data_buff[ERC721TokenTransfer.type()]:
+ if erc721_token_transfer.token_address not in erc20_tokens:
+ token_transfers.append(erc721_token_transfer)
if ERC1155TokenTransfer.type() in self._data_buff:
token_transfers += self._data_buff[ERC1155TokenTransfer.type()]
return token_transfers
+ @calculate_execution_time
+ def extract_token_parameters(
+ self,
+ token_transfers: List[Union[ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer]],
+ block_number: Union[Optional[int], str] = None,
+ ):
+ origin_parameters = set()
+ token_parameters = []
+ for transfer in token_transfers:
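+            # Skip tokens already flagged as having no callable balanceOf.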
+ if transfer.token_address in self.tokens and self.tokens[transfer.token_address]["no_balance_of"]:
+ continue
+ common_params = {
+ "token_address": transfer.token_address,
+ "token_id": (transfer.token_id if isinstance(transfer, ERC1155TokenTransfer) else None),
+ "token_type": transfer.token_type,
+ "block_number": transfer.block_number if block_number is None else block_number,
+ "block_timestamp": transfer.block_timestamp,
+ }
+ if transfer.from_address != ZERO_ADDRESS:
+ origin_parameters.add(TokenBalanceParam(address=transfer.from_address, **common_params))
+ if transfer.to_address != ZERO_ADDRESS:
+ origin_parameters.add(TokenBalanceParam(address=transfer.to_address, **common_params))
+
+ for parameter in origin_parameters:
+ token_parameters.append(
+ {
+ "address": parameter.address,
+ "token_address": parameter.token_address,
+ "token_id": parameter.token_id,
+ "token_type": parameter.token_type,
+ "param_to": parameter.token_address,
+ "param_data": encode_balance_abi_parameter(
+ parameter.address, parameter.token_type, parameter.token_id
+ ),
+ "param_number": parameter.block_number if block_number is None else block_number,
+ "block_number": parameter.block_number if block_number is None else block_number,
+ "block_timestamp": parameter.block_timestamp,
+ }
+ )
+
+ return token_parameters
+
def encode_balance_abi_parameter(address, token_type, token_id):
if token_type == "ERC1155":
@@ -109,7 +180,6 @@ def encode_balance_abi_parameter(address, token_type, token_id):
return to_hex(HexBytes(ERC20_BALANCE_OF_FUNCTION.get_signature()) + encoded_arguments)
-@calculate_execution_time
def extract_token_parameters(
token_transfers: List[Union[ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer]],
block_number: Union[Optional[int], str] = None,
diff --git a/indexer/jobs/export_token_id_infos_job.py b/hemera/indexer/jobs/export_token_id_infos_job.py
similarity index 89%
rename from indexer/jobs/export_token_id_infos_job.py
rename to hemera/indexer/jobs/export_token_id_infos_job.py
index 82e959e4a..ffc8b70dc 100644
--- a/indexer/jobs/export_token_id_infos_job.py
+++ b/hemera/indexer/jobs/export_token_id_infos_job.py
@@ -2,19 +2,19 @@
from itertools import groupby
from typing import List, Optional, Union
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.domain.token_id_infos import (
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains.token_id_infos import (
ERC721TokenIdChange,
ERC721TokenIdDetail,
ERC1155TokenIdDetail,
UpdateERC721TokenIdDetail,
UpdateERC1155TokenIdDetail,
)
-from indexer.domain.token_transfer import ERC721TokenTransfer, ERC1155TokenTransfer
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob
-from indexer.utils.multicall_hemera.util import calculate_execution_time
-from indexer.utils.token_fetcher import TokenFetcher
+from hemera.indexer.domains.token_transfer import ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.utils.multicall_hemera.util import calculate_execution_time
+from hemera.indexer.utils.token_fetcher import TokenFetcher
logger = logging.getLogger(__name__)
diff --git a/indexer/jobs/export_tokens_and_transfers_job.py b/hemera/indexer/jobs/export_tokens_and_transfers_job.py
similarity index 79%
rename from indexer/jobs/export_tokens_and_transfers_job.py
rename to hemera/indexer/jobs/export_tokens_and_transfers_job.py
index d0d5ba63f..3d1bf1510 100644
--- a/indexer/jobs/export_tokens_and_transfers_job.py
+++ b/hemera/indexer/jobs/export_tokens_and_transfers_job.py
@@ -1,27 +1,28 @@
+import json
import logging
from dataclasses import asdict
from typing import Dict, List
import orjson
-from common.utils.abi_code_utils import decode_data, encode_data
-from common.utils.format_utils import to_snake_case
-from enumeration.record_level import RecordLevel
-from enumeration.token_type import TokenType
-from indexer.domain import dataclass_to_dict, dict_to_dataclass
-from indexer.domain.log import Log
-from indexer.domain.token import Token, UpdateToken
-from indexer.domain.token_transfer import (
+from hemera.common.enumeration.record_level import RecordLevel
+from hemera.common.enumeration.token_type import TokenType
+from hemera.common.utils.abi_code_utils import decode_data, encode_data
+from hemera.common.utils.format_utils import to_snake_case
+from hemera.indexer.domains import dataclass_to_dict, dict_to_dataclass
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token import Token, UpdateToken
+from hemera.indexer.domains.token_transfer import (
ERC20TokenTransfer,
ERC721TokenTransfer,
ERC1155TokenTransfer,
TokenTransfer,
extract_transfer_from_log,
)
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import FilterTransactionDataJob
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.abi_setting import (
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.abi_setting import (
ERC20_TRANSFER_EVENT,
ERC721_OWNER_OF_FUNCTION,
ERC721_TOKEN_URI_FUNCTION,
@@ -34,9 +35,9 @@
WETH_DEPOSIT_EVENT,
WETH_WITHDRAW_EVENT,
)
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.json_rpc_requests import generate_eth_call_json_rpc_without_block_number
-from indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.json_rpc_requests import generate_eth_call_json_rpc_without_block_number
+from hemera.indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
logger = logging.getLogger(__name__)
exception_recorder = ExceptionRecorder()
@@ -153,15 +154,21 @@ def _collect(self, **kwargs):
self._batch_work_executor.wait()
for token in self.get_buff()[Token.type()]:
- self.tokens[token.address] = asdict(token)
-
- filtered_old_tokens = [token for token in token_dict.values() if token.token_type != TokenType.ERC1155.value]
- self._batch_work_executor.execute(
- [dataclass_to_dict(x) for x in filtered_old_tokens],
- self._export_token_total_supply_batch,
- total_items=len(filtered_old_tokens),
- )
- self._batch_work_executor.wait()
+ dic = asdict(token)
+ dic["fail_balance_of_count"] = 0
+ dic["succeed_balance_of_count"] = 0
+ dic["no_balance_of"] = False
+ dic["fail_total_supply_count"] = 0
+ dic["no_total_supply"] = False
+ self.tokens[token.address] = dic
+
+ # filtered_old_tokens = [token for token in token_dict.values() if token.token_type != TokenType.ERC1155.value]
+ # self._batch_work_executor.execute(
+ # [dataclass_to_dict(x) for x in filtered_old_tokens],
+ # self._export_token_total_supply_batch,
+ # total_items=len(filtered_old_tokens),
+ # )
+ # self._batch_work_executor.wait()
self._batch_work_executor.execute(
token_transfers,
@@ -178,7 +185,11 @@ def _extract_batch(self, logs):
def _generate_token_transfers(self, token_transfers):
for transfer in token_transfers:
if transfer.token_id is None:
- transfer.token_type = self.tokens[transfer.token_address]["token_type"]
+                token_type = self.tokens[transfer.token_address]["token_type"]
+                # Do not overwrite a non-ERC20 transfer's type with ERC20 from the token registry.
+                if token_type != TokenType.ERC20.value or transfer.token_type == TokenType.ERC20.value:
+                    transfer.token_type = token_type
self._collect_domain(transfer.to_specific_transfer())
def _export_token_info_batch(self, tokens):
@@ -194,7 +205,7 @@ def _export_token_total_supply_batch(self, tokens):
def _process(self, **kwargs):
for token_transfer_type in self.output_transfer_types:
- if token_transfer_type in self._data_buff:
+ if token_transfer_type.type() in self._data_buff:
self._data_buff[token_transfer_type.type()].sort(
key=lambda x: (x.block_number, x.transaction_hash, x.log_index)
)
@@ -243,7 +254,10 @@ def tokens_total_supply_rpc_requests(make_requests, tokens, is_batch):
token["total_supply"] = decode_data(["uint256"], bytes.fromhex(value))[0]
except Exception as e:
logger.warning(
- f"Decoding token {fn_name} failed. " f"token: {token}. " f"rpc response: {result}. " f"exception: {e}"
+ f"Decoding token {fn_name} failed. "
+ f"token: {json.dumps(token)}. "
+                f"rpc response: {str(result)[:500]}... (truncated). "
+ f"exception: {e}"
)
token["total_supply"] = None
return tokens
@@ -280,6 +294,10 @@ def tokens_info_rpc_requests(make_requests, tokens, is_batch):
token = data[0]
value = result[2:] if result is not None else None
key = to_snake_case(fn_name)
+ if value is None:
+                # skip decoding; the RPC returned no result
+ token[key] = None
+ continue
try:
token[key] = decode_data([token["data_type"]], bytes.fromhex(value))[0]
if token["data_type"] == "string":
@@ -287,8 +305,8 @@ def tokens_info_rpc_requests(make_requests, tokens, is_batch):
except Exception as e:
logger.warning(
f"Decoding token {fn_name} failed. "
- f"token: {token}. "
- f"rpc response: {result}. "
+ f"token: {json.dumps(token)}. "
+                f"rpc response: {str(result)[:500]}... (truncated). "
f"exception: {e}"
)
exception_recorder.log(
diff --git a/indexer/jobs/export_traces_job.py b/hemera/indexer/jobs/export_traces_job.py
similarity index 90%
rename from indexer/jobs/export_traces_job.py
rename to hemera/indexer/jobs/export_traces_job.py
index b1c1b2f40..3580c5a1a 100644
--- a/indexer/jobs/export_traces_job.py
+++ b/hemera/indexer/jobs/export_traces_job.py
@@ -5,17 +5,17 @@
import orjson
from eth_utils import to_int
-from common.utils.exception_control import HistoryUnavailableError
-from enumeration.record_level import RecordLevel
-from indexer.domain import dataclass_to_dict
-from indexer.domain.block import Block, UpdateBlockInternalCount
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.domain.trace import Trace
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.json_rpc_requests import generate_trace_block_by_number_json_rpc
-from indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+from hemera.common.enumeration.record_level import RecordLevel
+from hemera.common.utils.exception_control import HistoryUnavailableError
+from hemera.indexer.domains import dataclass_to_dict
+from hemera.indexer.domains.block import Block, UpdateBlockInternalCount
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.domains.trace import Trace
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.json_rpc_requests import generate_trace_block_by_number_json_rpc
+from hemera.indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
logger = logging.getLogger(__name__)
exception_recorder = ExceptionRecorder()
diff --git a/hemera/indexer/jobs/export_transactions_and_logs_job.py b/hemera/indexer/jobs/export_transactions_and_logs_job.py
new file mode 100644
index 000000000..757ab2c7b
--- /dev/null
+++ b/hemera/indexer/jobs/export_transactions_and_logs_job.py
@@ -0,0 +1,124 @@
+import logging
+from typing import List, Union
+
+import orjson
+
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.receipt import Receipt
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseExportJob, Collector
+from hemera.indexer.utils.collection_utils import flatten
+from hemera.indexer.utils.json_rpc_requests import (
+ generate_get_receipt_from_blocks_json_rpc,
+ generate_get_receipt_json_rpc,
+)
+from hemera.indexer.utils.rpc_utils import rpc_response_batch_to_results, zip_rpc_response
+
+logger = logging.getLogger(__name__)
+
+
+# Exports transactions and logs
+class ExportTransactionsAndLogsJob(BaseExportJob):
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ self._batch_work_executor = BatchWorkExecutor(
+ kwargs["batch_size"],
+ kwargs["max_workers"],
+ job_name=self.__class__.__name__,
+ )
+ self._is_batch = kwargs["batch_size"] > 1
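+        # When enabled, receipts are fetched via eth_getBlockReceipts (one request per block)
+        # instead of one eth_getTransactionReceipt per transaction.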
+ self._use_receipt_from_blocks_rpc = self.user_defined_config.get("use_receipt_from_blocks_rpc") or False
+
+ def request_for_receipt_from_block(self, blocks: List[Block], output: Collector):
+ transaction_hash_mapper = {
+ transaction.hash: transaction for block in blocks for transaction in block.transactions
+ }
+ results = receipt_rpc_from_block_number_requests(
+ self._batch_web3_provider.make_request,
+ [block.number for block in blocks],
+ self._is_batch,
+ )
+ for block, receipts in zip_rpc_response(blocks, results, index="number"):
+ for receipt in receipts:
+ transaction = transaction_hash_mapper[receipt["transactionHash"]]
+ receipt_entity = Receipt.from_rpc(
+ receipt,
+ block.timestamp,
+ block.hash,
+ block.number,
+ )
+                transaction.fill_with_receipt(receipt_entity)
+                # Collect the enriched transaction as well, matching request_for_receipt below.
+                output.collect(transaction)
+
+                for log in transaction.receipt.logs:
+                    output.collect(log)
+
+ def request_for_receipt(self, blocks: List[Block], output: Collector):
+ transaction_hash_mapper = {
+ transaction.hash: transaction for block in blocks for transaction in block.transactions
+ }
+
+ if self._use_receipt_from_blocks_rpc:
+ results = receipt_rpc_from_block_number_requests(
+ self._batch_web3_provider.make_request,
+ [block.number for block in blocks],
+ self._is_batch,
+ )
+ else:
+ results = receipt_rpc_requests(
+ self._batch_web3_provider.make_request,
+ transaction_hash_mapper.keys(),
+ self._is_batch,
+ )
+
+ for receipt in results:
+ if receipt is None:
+ continue
+ transaction = transaction_hash_mapper.get(receipt["transactionHash"])
+ if transaction:
+ receipt_entity = Receipt.from_rpc(
+ receipt,
+ transaction.block_timestamp,
+ transaction.block_hash,
+ transaction.block_number,
+ )
+ transaction.fill_with_receipt(receipt_entity)
+ output.collect(transaction)
+
+ for log in transaction.receipt.logs:
+ output.collect(log)
+
+ def _udf(self, blocks: List[Block], output: Collector[Union[Transaction, Log]]):
+ self._batch_work_executor.execute(blocks, self.request_for_receipt, collector=output, total_items=len(blocks))
+ self._batch_work_executor.wait()
+
+ self._data_buff[Transaction.type()].sort(key=lambda x: (x.block_number, x.transaction_index))
+ self._data_buff[Log.type()].sort(key=lambda x: (x.block_number, x.log_index))
+
+
+def receipt_rpc_requests(make_request, transaction_hashes, is_batch):
+ receipts_rpc = list(generate_get_receipt_json_rpc(transaction_hashes))
+
+ if is_batch:
+ response = make_request(params=orjson.dumps(receipts_rpc))
+ else:
+ response = [make_request(params=orjson.dumps(receipts_rpc[0]))]
+
+ results = rpc_response_batch_to_results(response)
+ return list(results)
+
+
+def receipt_rpc_from_block_number_requests(make_request, block_numbers, is_batch):
+ receipts_rpc = list(generate_get_receipt_from_blocks_json_rpc(block_numbers))
+
+ if is_batch:
+ response = make_request(params=orjson.dumps(receipts_rpc))
+ else:
+ response = [make_request(params=orjson.dumps(receipts_rpc[0]))]
+
+ results = rpc_response_batch_to_results(response)
+ return flatten(results)
diff --git a/indexer/modules/bridge/__init__.py b/hemera/indexer/jobs/source_job/__init__.py
similarity index 100%
rename from indexer/modules/bridge/__init__.py
rename to hemera/indexer/jobs/source_job/__init__.py
diff --git a/indexer/jobs/source_job/csv_source_job.py b/hemera/indexer/jobs/source_job/csv_source_job.py
similarity index 72%
rename from indexer/jobs/source_job/csv_source_job.py
rename to hemera/indexer/jobs/source_job/csv_source_job.py
index 51f76c3b8..b5d467f56 100644
--- a/indexer/jobs/source_job/csv_source_job.py
+++ b/hemera/indexer/jobs/source_job/csv_source_job.py
@@ -4,29 +4,29 @@
import pandas
-from indexer.domain import dict_to_dataclass, domains_mapping
-from indexer.domain.block import Block, UpdateBlockInternalCount
-from indexer.domain.block_ts_mapper import BlockTsMapper
-from indexer.domain.coin_balance import CoinBalance
-from indexer.domain.contract import Contract
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.domain.current_token_balance import CurrentTokenBalance
-from indexer.domain.log import Log
-from indexer.domain.token import Token, UpdateToken
-from indexer.domain.token_balance import TokenBalance
-from indexer.domain.token_id_infos import (
+from hemera.indexer.domains import dict_to_dataclass, domains_mapping
+from hemera.indexer.domains.block import Block, UpdateBlockInternalCount
+from hemera.indexer.domains.block_ts_mapper import BlockTsMapper
+from hemera.indexer.domains.coin_balance import CoinBalance
+from hemera.indexer.domains.contract import Contract
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token import Token, UpdateToken
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.domains.token_id_infos import (
ERC721TokenIdChange,
ERC721TokenIdDetail,
ERC1155TokenIdDetail,
UpdateERC721TokenIdDetail,
UpdateERC1155TokenIdDetail,
)
-from indexer.domain.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
-from indexer.domain.trace import Trace
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseSourceJob
-from indexer.utils.parameter_utils import extract_path_from_parameter
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.domains.trace import Trace
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import BaseSourceJob
+from hemera.indexer.utils.parameter_utils import extract_path_from_parameter
logger = logging.getLogger(__name__)
diff --git a/indexer/jobs/source_job/pg_source_job.py b/hemera/indexer/jobs/source_job/pg_source_job.py
similarity index 87%
rename from indexer/jobs/source_job/pg_source_job.py
rename to hemera/indexer/jobs/source_job/pg_source_job.py
index 1e8467a43..492551920 100644
--- a/indexer/jobs/source_job/pg_source_job.py
+++ b/hemera/indexer/jobs/source_job/pg_source_job.py
@@ -2,27 +2,27 @@
import inspect
import logging
from collections import defaultdict
-from datetime import datetime
-from decimal import Decimal
+from datetime import datetime, timezone
from queue import Queue
from typing import List, Type, Union, get_args, get_origin
from sqlalchemy import and_, func, select
-
-from common.converter.pg_converter import domain_model_mapping
-from common.models.blocks import Blocks
-from common.models.logs import Logs
-from common.models.transactions import Transactions
-from common.services.postgresql_service import PostgreSQLService
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.domain import Domain, dict_to_dataclass
-from indexer.domain.block import Block
-from indexer.domain.log import Log
-from indexer.domain.receipt import Receipt
-from indexer.domain.transaction import Transaction
-from indexer.jobs.base_job import BaseSourceJob
-from indexer.specification.specification import (
+from sqlalchemy.dialects.postgresql import BIGINT, INTEGER
+
+from hemera.common.models.blocks import Blocks
+from hemera.common.models.logs import Logs
+from hemera.common.models.transactions import Transactions
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.db_utils import table_to_dataclass
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains import Domain, dict_to_dataclass
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.receipt import Receipt
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs.base_job import BaseSourceJob
+from hemera.indexer.specification.specification import (
AlwaysFalseSpecification,
AlwaysTrueSpecification,
FromAddressSpecification,
@@ -31,7 +31,7 @@
TransactionFilterByTransactionInfo,
TransactionHashSpecification,
)
-from indexer.utils.collection_utils import distinct_collections_by_group, flatten
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group, flatten
logger = logging.getLogger(__name__)
@@ -44,9 +44,13 @@ class PGSourceJob(BaseSourceJob):
]
def __init__(self, **kwargs):
+ from hemera.common.converter.pg_converter import domain_model_mapping
+
super().__init__(**kwargs)
self._source_path = kwargs["config"].get("source_path", None)
- self._service = PostgreSQLService(self._source_path) if self._source_path else None
+ if self._source_path is None:
+ raise FastShutdownError("-pg or --postgres-url is required to run PGSourceJob")
+
self.pre_build = defaultdict(list)
self.post_build = defaultdict()
self.domain_mapping = defaultdict(dict)
@@ -54,9 +58,7 @@ def __init__(self, **kwargs):
self._filters = flatten(kwargs.get("filters", []))
self._is_filter = kwargs.get("is_filter", False)
self._specification = AlwaysFalseSpecification() if self._is_filter else AlwaysTrueSpecification()
-
- if self._service is None:
- raise FastShutdownError("-pg or --postgres-url is required to run PGSourceJob")
+ self._domain_model_mapping = domain_model_mapping
for output_type in self.output_types:
self._dataclass_build_dependence(output_type, Domain)
@@ -64,6 +66,8 @@ def __init__(self, **kwargs):
self._calculate_build_queue()
def _collect(self, **kwargs):
+ self._service = PostgreSQLService(self._source_path)
+
start_block = int(kwargs["start_block"])
end_block = int(kwargs["end_block"])
start_timestamp = self._query_timestamp_with_block(start_block)
@@ -178,7 +182,7 @@ def _collect(self, **kwargs):
def _collect_from_pg(self, blocks, start_timestamp, end_timestamp):
for output_type in self.output_types:
- table = domain_model_mapping[output_type]["table"]
+ table = self._domain_model_mapping[output_type]["table"]
if len(self.pg_datas[table]) == 0:
start_time = datetime.now()
self.pg_datas[table] = self._query_with_blocks(table, blocks, start_timestamp, end_timestamp)
@@ -189,7 +193,7 @@ def _collect_from_pg(self, blocks, start_timestamp, end_timestamp):
def _process(self, **kwargs):
self.domain_mapping.clear()
for output_type in self.build_order:
- table = domain_model_mapping[output_type]["table"]
+ table = self._domain_model_mapping[output_type]["table"]
domains = self._dataclass_build(self.pg_datas[table], output_type)
if hasattr(table, "__query_order__"):
domains.sort(key=lambda x: tuple(getattr(x, column.name) for column in table.__query_order__))
@@ -226,6 +230,10 @@ def _query_with_blocks(self, table, blocks, start_timestamp, end_timestamp):
session.query(sub_table).join(unnest_query, sub_table.c.number == unnest_query.c.block_number).all()
)
elif hasattr(table, "block_number") and hasattr(table, "block_timestamp"):
+            # Integer timestamp columns store epoch seconds; convert the datetime bounds to match.
+            if isinstance(table.block_timestamp.type, (BIGINT, INTEGER)):
+ start_timestamp = int(round(start_timestamp.replace(tzinfo=timezone.utc).timestamp()))
+ end_timestamp = int(round(end_timestamp.replace(tzinfo=timezone.utc).timestamp()))
+
sub_table = (
select(table)
.filter(and_(table.block_timestamp >= start_timestamp, table.block_timestamp <= end_timestamp))
@@ -446,52 +454,3 @@ def check_dependency(column_type, target_type) -> (bool, object):
return True, column_type
return False, None
-
-
-def table_to_dataclass(row_instance, cls):
- """
- Converts row of table to a dataclass instance, handling nested structures.
-
- Args:
- row_instance (HemeraModel): The input data structure.
- cls: The dataclass type to convert to.
-
- Returns:
- An instance of the dataclass which is corresponding to table in the definition.
- """
-
- dict_instance = {}
- if hasattr(row_instance, "__table__"):
- for column in row_instance.__table__.columns:
- if column.name == "meta_data":
- meta_data_json = getattr(row_instance, column.name)
- if meta_data_json:
- for key in meta_data_json:
- dict_instance[key] = meta_data_json[key]
- else:
- value = getattr(row_instance, column.name)
- dict_instance[column.name] = convert_value(value)
- else:
- for column, value in row_instance._asdict().items():
- dict_instance[column] = convert_value(value)
-
- domain = dict_to_dataclass(dict_instance, cls)
- if cls is Transaction:
- domain.fill_with_receipt(Receipt.from_pg(dict_instance))
-
- return domain
-
-
-def convert_value(value):
- if isinstance(value, datetime):
- return int(round(value.timestamp()))
- elif isinstance(value, Decimal):
- return float(value)
- elif isinstance(value, bytes):
- return bytes_to_hex_str(value)
- elif isinstance(value, list):
- return [convert_value(v) for v in value]
- elif isinstance(value, dict):
- return {k: convert_value(v) for k, v in value.items()}
- else:
- return value
diff --git a/indexer/modules/bridge/arbitrum/__init__.py b/hemera/indexer/specification/__init__.py
similarity index 100%
rename from indexer/modules/bridge/arbitrum/__init__.py
rename to hemera/indexer/specification/__init__.py
diff --git a/indexer/specification/specification.py b/hemera/indexer/specification/specification.py
similarity index 98%
rename from indexer/specification/specification.py
rename to hemera/indexer/specification/specification.py
index a1c988274..a89fcf231 100644
--- a/indexer/specification/specification.py
+++ b/hemera/indexer/specification/specification.py
@@ -3,7 +3,7 @@
from eth_utils import to_checksum_address
from web3 import Web3
-from indexer.domain.transaction import Transaction
+from hemera.indexer.domains.transaction import Transaction
class Specification:
diff --git a/indexer/modules/bridge/bedrock/__init__.py b/hemera/indexer/utils/__init__.py
similarity index 100%
rename from indexer/modules/bridge/bedrock/__init__.py
rename to hemera/indexer/utils/__init__.py
diff --git a/indexer/utils/abi.py b/hemera/indexer/utils/abi.py
similarity index 50%
rename from indexer/utils/abi.py
rename to hemera/indexer/utils/abi.py
index 34dbe1712..4e982b7e5 100644
--- a/indexer/utils/abi.py
+++ b/hemera/indexer/utils/abi.py
@@ -1,7 +1,10 @@
+from itertools import accumulate
from typing import Any, Dict, Optional, Sequence, Tuple
from eth_abi.codec import ABICodec
from eth_abi.grammar import BasicType
+from eth_abi.utils.numeric import ceil32
+from eth_abi.utils.padding import zpad, zpad_right
from eth_typing import ChecksumAddress, HexStr, TypeStr
from eth_utils import (
event_abi_to_log_topic,
@@ -17,7 +20,7 @@
from web3._utils.normalizers import implicitly_identity, parse_basic_type_str
from web3.types import ABIEvent, ABIFunction
-from common.utils.format_utils import bytes_to_hex_str
+from hemera.common.utils.format_utils import bytes_to_hex_str
codec = ABICodec(build_strict_registry())
@@ -70,9 +73,6 @@ def abi_address_to_hex(type_str: TypeStr, data: Any) -> Optional[Tuple[TypeStr,
def uint256_to_bytes(value: int) -> bytes:
- if value < 0 or value >= 2**256:
- raise ValueError("Value out of uint256 range")
-
return value.to_bytes(32, byteorder="big")
@@ -84,3 +84,64 @@ def pad_address(address: str) -> bytes:
padded = "0" * 24 + address
return bytes.fromhex(padded)
+
+
+def encode_bool(arg: bool) -> bytes:
+ value = b"\x01" if arg is True else b"\x00"
+ return zpad(value, 32)
+
+
+def encode_bytes(value: bytes) -> bytes:
+ value_length = len(value)
+
+ encoded_size = uint256_to_bytes(value_length)
+ padded_value = zpad_right(value, ceil32(value_length))
+
+ return encoded_size + padded_value
+
+
+def tuple_encode(values, type_lis):
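+    # Minimal ABI head/tail encoder covering only the types used here: bool, address, bytes,
+    # and (address,bytes) tuples/arrays.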
+ raw_head_chunks = []
+ tail_chunks = []
+ for value, tp in zip(values, type_lis):
+ if tp == "bytes":
+ raw_head_chunks.append(None)
+ tail_chunks.append(encode_bytes(value))
+ elif tp == "bool":
+ raw_head_chunks.append(encode_bool(value))
+ tail_chunks.append(b"")
+ elif tp == "address":
+ raw_head_chunks.append(pad_address(value))
+ tail_chunks.append(b"")
+ elif tp == "(address,bytes)[]":
+            if len(value) == 0:
+                return b"".join(tail_chunks)
+ encoded_size = uint256_to_bytes(len(value))
+
+ tmp_tail_chunks = tuple(tuple_encode(list(i), ["address", "bytes"]) for i in value)
+ head_length = 32 * len(value)
+ tail_offsets = (0,) + tuple(accumulate(map(len, tmp_tail_chunks[:-1])))
+ head_chunks = tuple(uint256_to_bytes(head_length + offset) for offset in tail_offsets)
+ raw_head_chunks.append(None)
+ tail_chunks.append(encoded_size + b"".join(head_chunks + tmp_tail_chunks))
+ elif tp == "(address,bytes)":
+ encoded_size = uint256_to_bytes(len(value))
+ encoded_elements = b""
+ for target, call_data in value:
+ encoded_elements += pad_address(target)
+ encoded_elements += tuple_encode([call_data], ["bytes"])
+ raw_head_chunks.append(None)
+ tail_chunks.append(encoded_size + encoded_elements)
+ else:
+ raise Exception(f"Unsupported type {tp}")
+
+ head_length = sum(32 if item is None else len(item) for item in raw_head_chunks)
+ tail_offsets = (0,) + tuple(accumulate(map(len, tail_chunks[:-1])))
+ head_chunks = tuple(
+ uint256_to_bytes(head_length + offset) if chunk is None else chunk
+ for chunk, offset in zip(raw_head_chunks, tail_offsets)
+ )
+
+ encoded_value = b"".join(head_chunks + tuple(tail_chunks))
+ return encoded_value
diff --git a/indexer/utils/abi_setting.py b/hemera/indexer/utils/abi_setting.py
similarity index 98%
rename from indexer/utils/abi_setting.py
rename to hemera/indexer/utils/abi_setting.py
index 67e631eb8..5b682219a 100644
--- a/indexer/utils/abi_setting.py
+++ b/hemera/indexer/utils/abi_setting.py
@@ -5,7 +5,7 @@
Author : xuzh
Project : hemera_indexer
"""
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
# log event
WETH_DEPOSIT_EVENT = Event(
diff --git a/indexer/utils/atomic_counter.py b/hemera/indexer/utils/atomic_counter.py
similarity index 100%
rename from indexer/utils/atomic_counter.py
rename to hemera/indexer/utils/atomic_counter.py
diff --git a/hemera/indexer/utils/buffer_service.py b/hemera/indexer/utils/buffer_service.py
new file mode 100644
index 000000000..1e073fe60
--- /dev/null
+++ b/hemera/indexer/utils/buffer_service.py
@@ -0,0 +1,360 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Time : 2024/11/19 6:07 PM
+Author : xuzh
+Project : hemera_indexer
+"""
+import json
+import logging
+import os
+import signal
+import threading
+import time
+from collections import defaultdict
+from concurrent.futures import Future, ThreadPoolExecutor
+from distutils.util import strtobool
+from threading import Event
+from typing import Any, Callable, Dict, List, Tuple
+
+from hemera.common.utils.exception_control import FastShutdownError, get_exception_details
+from hemera.indexer.utils.metrics_collector import MetricsCollector
+
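+# Runtime tuning knobs, all overridable via environment variables:
+# BUFFER_BLOCK_SIZE / MAX_BUFFER_SIZE control buffering granularity,
+# ASYNC_SUBMIT / CONCURRENT_SUBMITTERS control export parallelism,
+# CRASH_INSTANTLY aborts the service on the first export failure,
+# EXPORT_STRATEGY is a JSON mapping from job name to the domains it flushes.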
+BUFFER_BLOCK_SIZE = int(os.environ.get("BUFFER_BLOCK_SIZE", "1"))
+MAX_BUFFER_SIZE = int(os.environ.get("MAX_BUFFER_SIZE", "1"))
+ASYNC_SUBMIT = bool(strtobool(os.environ.get("ASYNC_SUBMIT", "false")))
+CONCURRENT_SUBMITTERS = int(os.environ.get("CONCURRENT_SUBMITTERS", "1"))
+CRASH_INSTANTLY = bool(strtobool(os.environ.get("CRASH_INSTANTLY", "true")))
+EXPORT_STRATEGY = json.loads(os.environ.get("EXPORT_STRATEGY", "{}"))
+
+
+class KeyLockContext:
+
+ def __init__(self, manager, key, lock):
+ self._manager = manager
+ self._key = key
+ self._lock = lock
+
+ def __enter__(self):
+ self._lock.acquire()
+
+ with self._manager._meta_lock:
+ self._manager._active_row_locks.add(self._key)
+
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ try:
+ with self._manager._meta_lock:
+ self._manager._active_row_locks.discard(self._key)
+ should_notify = len(self._manager._active_row_locks) == 0
+
+ if should_notify:
+ with self._manager._global_condition:
+ self._manager._global_condition.notify_all()
+ finally:
+ self._lock.release()
+
+
+class BufferLockManager:
+ def __init__(self):
+ self._global_lock = threading.Lock()
+ self._global_condition = threading.Condition(self._global_lock)
+
+ self._locks: Dict[str, threading.Lock] = {}
+ self._meta_lock = threading.Lock()
+
+ self._active_row_locks = set()
+
+ def __getitem__(self, key: str) -> KeyLockContext:
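+        # If a global flush currently holds the lock, wait here until it is released
+        # before handing out a per-key lock.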
+ if self._global_lock.locked():
+ with self._global_lock:
+ pass
+
+ with self._meta_lock:
+ if key not in self._locks:
+ self._locks[key] = threading.Lock()
+ return KeyLockContext(self, key, self._locks[key])
+
+ def acquire_global_lock(self, timeout=-1):
+ if not self._global_lock.acquire(blocking=True, timeout=timeout):
+ return False
+
+ try:
+ while self._active_row_locks:
+ self._global_condition.wait()
+ return True
+ except Exception as e:
+ if self._global_lock.locked():
+ self._global_lock.release()
+ raise RuntimeError(f"Failed to acquire global lock: {str(e)}")
+
+ def release_global(self):
+ try:
+ self._global_condition.notify_all()
+ finally:
+ self._global_lock.release()
+
+ def remove(self, key: str) -> None:
+ with self._meta_lock:
+ self._locks.pop(key, None)
+ self._active_row_locks.discard(key)
+ should_notify = len(self._active_row_locks) == 0
+
+ if should_notify:
+ with self._global_condition:
+ self._global_condition.notify_all()
+
+ def clear(self) -> None:
+ with self._meta_lock:
+ self._locks.clear()
+ self._active_row_locks.clear()
+
+ with self._global_condition:
+ self._global_condition.notify_all()
+
+ def __enter__(self):
+ if not self.acquire_global_lock():
+ raise RuntimeError("Failed to acquire global lock")
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.release_global()
+
+
+class BufferService:
+
+ def __init__(
+ self,
+ item_exporters,
+ required_output_types,
+ block_size: int = BUFFER_BLOCK_SIZE,
+ max_buffer_size: int = MAX_BUFFER_SIZE,
+ export_workers: int = CONCURRENT_SUBMITTERS,
+ success_callback: Callable = None,
+ exception_callback: Callable = None,
+ metrics: MetricsCollector = None,
+ ):
+ self.block_size = block_size
+ self.max_buffer_size = max_buffer_size
+ self.concurrent_submitters = threading.Semaphore(export_workers)
+
+ self.item_exporters = item_exporters
+ self.required_output_types = required_output_types
+
+ self.buffer = defaultdict(list)
+ self.buffer_lock = BufferLockManager()
+
+        self.output_in_progress: Dict[Tuple[int, int], set] = dict()
+        self.futures_output: Dict[Future, set] = dict()
+        self.pending_futures: Dict[Future, Tuple[int, int]] = dict()
+        self.futures_start: Dict[Future, float] = dict()
+ self.futures_lock = threading.Lock()
+
+ self._shutdown_event = Event()
+
+ self.submit_export_pool = ThreadPoolExecutor(max_workers=export_workers)
+
+ self._setup_signal_handlers()
+
+ self.success_callback = success_callback
+ self.exception_callback = exception_callback
+
+ self.export_strategy = EXPORT_STRATEGY
+
+ self.metrics = metrics
+ self.logger = logging.getLogger(__name__)
+
+ def keys(self) -> List[Any]:
+ return self.buffer.keys()
+
+ def __getitem__(self, key: str) -> List[Any]:
+ with self.buffer_lock[key]:
+ return self.buffer[key]
+
+ def get(self, key: str, default: Any = None) -> List[Any]:
+ with self.buffer_lock[key]:
+ return self.buffer.get(key, default)
+
+ def __setitem__(self, key: str, value: Any):
+ with self.buffer_lock[key]:
+ if isinstance(value, list):
+ self.buffer[key] = value
+ else:
+ self.buffer[key] = [value]
+
+ def __contains__(self, key: str) -> bool:
+ return key in self.buffer.keys()
+
+ def pop(self, key: str, default: Any = None) -> Any:
+ with self.buffer_lock[key]:
+ return self.buffer.pop(key, default)
+
+ def extend(self, key: str, values: List[Any]):
+ with self.buffer_lock[key]:
+ self.buffer[key].extend(values)
+
+ def append(self, key: str, value: Any):
+ with self.buffer_lock[key]:
+ self.buffer[key].append(value)
+
+ def _get_data_snapshot(self) -> Dict[str, List[Any]]:
+ snapshot = {}
+ all_keys = set(self.buffer.keys())
+
+ with self.buffer_lock:
+ for key in all_keys:
+ if key in self.buffer:
+ snapshot[key] = self.buffer[key].copy()
+ return snapshot
+
+ def _clear_exported_data(self, keys: List[str]):
+ for key in keys:
+ with self.buffer_lock[key]:
+ if key in self.buffer:
+ del self.buffer[key]
+
+ def _setup_signal_handlers(self):
+ signal.signal(signal.SIGTERM, self._handle_shutdown)
+ signal.signal(signal.SIGINT, self._handle_shutdown)
+
+ def _handle_shutdown(self, signum, frame):
+ # self.logger.info("Received shutdown signal, flushing buffer...")
+ # self.flush_buffer(self.required_output_types)
+ self._shutdown_event.set()
+
+ def _handle_export_completion(self, future: Future):
+ with self.futures_lock:
+ start_block, end_block = self.pending_futures[future]
+ complete_type = self.futures_output[future]
+ start_time = self.futures_start[future]
+
+ self.pending_futures.pop(future)
+ self.futures_output.pop(future)
+
+ try:
+ future.result()
+
+ self.output_in_progress[(start_block, end_block)] -= complete_type
+
+ if self.metrics:
+ for output_type in complete_type:
+ self.metrics.update_exported_domains(
+ domain=output_type,
+ status="success",
+ amount=len(self.buffer[output_type]),
+ )
+ self.metrics.update_export_domains_processing_duration(
+ domains=",".join(complete_type),
+ duration=int((time.time() - start_time) * 1000),
+ )
+
+ if self.success_callback and len(self.output_in_progress[(start_block, end_block)]) == 0:
+ try:
+ self.output_in_progress.pop((start_block, end_block))
+ self.success_callback(end_block)
+
+ if self.metrics:
+ self.metrics.update_last_sync_record(last_sync_record=end_block)
+
+ except Exception as e:
+                        self.logger.error(f"Failed to write last synced block number {end_block}: {e}")
+
+ except Exception as e:
+ exception_details = get_exception_details(e)
+ if self.exception_callback:
+ self.exception_callback(self.required_output_types, start_block, end_block, "export", exception_details)
+ self.logger.error(f"Exporting items error: {exception_details}")
+
+ if self.metrics:
+ self.metrics.update_failure_batch_counter()
+
+ for output_type in complete_type:
+ self.metrics.update_exported_domains(
+ domain=output_type,
+ status="failure",
+ amount=len(self.buffer[output_type]),
+ )
+
+ if CRASH_INSTANTLY:
+ self.shutdown()
+ raise FastShutdownError(f"Exporting items error: {exception_details}")
+
+ finally:
+ self.concurrent_submitters.release()
+
+ def export_items(self, items):
+ for item_exporter in self.item_exporters:
+ item_exporter.open()
+ item_exporter.export_items(items)
+ item_exporter.close()
+
+ def flush_buffer(self, start_block, end_block, flush_keys: List[str]):
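+        # Gather buffered items for the required domains under the global lock, then hand them to the export pool.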
+ flush_items = []
+ flush_type = set()
+ with self.buffer_lock:
+ block_range = (start_block, end_block)
+
+ for key in flush_keys:
+ if key in self.required_output_types:
+ flush_type.add(key)
+ flush_items.extend(self.buffer[key])
+
+ if self.metrics:
+ self.metrics.update_indexed_domains(domain=key, amount=len(self.buffer[key]))
+
+        if flush_type:
+ self.logger.info(f"Flush domains: {','.join(flush_type)} between block range: {block_range}")
+ else:
+ self.concurrent_submitters.release()
+ return True
+
+ with self.futures_lock:
+ future = self.submit_export_pool.submit(self.export_items, flush_items)
+ self.futures_output[future] = flush_type
+ self.pending_futures[future] = block_range
+ self.futures_start[future] = time.time()
+ if block_range not in self.output_in_progress:
+ self.output_in_progress[block_range] = set(self.required_output_types)
+
+ future.add_done_callback(self._handle_export_completion)
+
+ if not ASYNC_SUBMIT:
+ try:
+ future.result()
+ return True
+ except Exception as e:
+ return False
+
+ return True
+
+ def check_and_flush(self, start_block, end_block, job_name: str = None, output_types: List[str] = None):
+ if job_name in self.export_strategy:
+ output_types = self.export_strategy[job_name]
+
+ self.concurrent_submitters.acquire()
+
+        # flush_buffer already returns True under asynchronous submission,
+        # so its result can be returned directly in both modes.
+        return self.flush_buffer(start_block, end_block, output_types)
+
+ def clear(self):
+ with self.buffer_lock:
+ self.buffer.clear()
+
+ def shutdown(self):
+ if self._shutdown_event.is_set():
+ return
+
+ if self.metrics:
+ self.metrics.update_instance_shutdown()
+
+ self.logger.info("Shutting down buffer service...")
+ self._handle_shutdown(None, None)
+ self.submit_export_pool.shutdown(wait=True)
+ self.logger.info("Buffer service shut down completed")
+
+ def is_shutdown(self):
+ return self._shutdown_event.is_set()
diff --git a/indexer/utils/collection_utils.py b/hemera/indexer/utils/collection_utils.py
similarity index 77%
rename from indexer/utils/collection_utils.py
rename to hemera/indexer/utils/collection_utils.py
index b6d793569..12521b527 100644
--- a/indexer/utils/collection_utils.py
+++ b/hemera/indexer/utils/collection_utils.py
@@ -61,3 +61,20 @@ def split_to_batches(start_incl, end_incl, batch_size):
for batch_start in range(start_incl, end_incl + 1, batch_size):
batch_end = min(batch_start + batch_size - 1, end_incl)
yield batch_start, batch_end
+
+
+def merge_dataclasses(job, data_class, attributes):
+    """Deduplicate buffered `data_class` items on the given attribute tuple,
+    keeping only the newest record (highest block_number) for each key."""
+    if data_class.type() not in job._data_buff:
+        return
+    tmps = job._data_buff.pop(data_class.type())
+    tmps.sort(key=lambda x: x.block_number, reverse=True)
+    lis = []
+    unique_k_set = set()
+    for li in tmps:
+        k = tuple(getattr(li, at) for at in attributes)
+        if k not in unique_k_set:
+            unique_k_set.add(k)
+            lis.append(li)
+    if len(lis) > 0:
+        job._collect_items(data_class.type(), lis)
diff --git a/indexer/utils/exception_recorder.py b/hemera/indexer/utils/exception_recorder.py
similarity index 97%
rename from indexer/utils/exception_recorder.py
rename to hemera/indexer/utils/exception_recorder.py
index b3a093768..df31946b3 100644
--- a/indexer/utils/exception_recorder.py
+++ b/hemera/indexer/utils/exception_recorder.py
@@ -3,7 +3,7 @@
from sqlalchemy.dialects.postgresql import insert
-from common.models.exception_records import ExceptionRecords
+from hemera.common.models.exception_records import ExceptionRecords
LOG_BUFFER_SIZE = 5000
diff --git a/indexer/utils/json_rpc_requests.py b/hemera/indexer/utils/json_rpc_requests.py
similarity index 88%
rename from indexer/utils/json_rpc_requests.py
rename to hemera/indexer/utils/json_rpc_requests.py
index 123e4bd86..27ab817a7 100644
--- a/indexer/utils/json_rpc_requests.py
+++ b/hemera/indexer/utils/json_rpc_requests.py
@@ -1,4 +1,4 @@
-from common.utils.format_utils import format_block_id
+from hemera.common.utils.format_utils import format_block_id
def generate_get_block_by_number_json_rpc(block_numbers, include_transactions):
@@ -29,6 +29,15 @@ def generate_get_receipt_json_rpc(transaction_hashes):
)
+def generate_get_receipt_from_blocks_json_rpc(block_numbers):
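+    # One eth_getBlockReceipts request per block number, returning all receipts of that block in a single call.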
+ for idx, block_number in enumerate(block_numbers):
+ yield generate_json_rpc(
+ method="eth_getBlockReceipts",
+ params=[hex(block_number)],
+ request_id=idx,
+ )
+
+
def generate_get_code_json_rpc(contract_addresses, block="latest"):
for idx, contract_address in enumerate(contract_addresses):
yield generate_json_rpc(
diff --git a/indexer/utils/limit_reader.py b/hemera/indexer/utils/limit_reader.py
similarity index 80%
rename from indexer/utils/limit_reader.py
rename to hemera/indexer/utils/limit_reader.py
index bb99fc219..6354e180e 100644
--- a/indexer/utils/limit_reader.py
+++ b/hemera/indexer/utils/limit_reader.py
@@ -1,10 +1,10 @@
from sqlalchemy import func
-from common.models.blocks import Blocks
-from common.services.postgresql_service import PostgreSQLService
-from common.utils.exception_control import FastShutdownError
-from common.utils.web3_utils import build_web3
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.common.models.blocks import Blocks
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.web3_utils import build_web3
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
class LimitReader(object):
diff --git a/indexer/utils/logging_utils.py b/hemera/indexer/utils/logging_utils.py
similarity index 100%
rename from indexer/utils/logging_utils.py
rename to hemera/indexer/utils/logging_utils.py
diff --git a/hemera/indexer/utils/metrics_collector.py b/hemera/indexer/utils/metrics_collector.py
new file mode 100644
index 000000000..33127fe88
--- /dev/null
+++ b/hemera/indexer/utils/metrics_collector.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import os
+from collections import defaultdict
+
+from prometheus_client import Counter, Gauge, start_http_server
+
+from hemera.indexer.utils.metrics_persistence import BasePersistence
+
+METRICS_CLIENT_PORT = int(os.environ.get("METRICS_CLIENT_PORT", "9200"))
+
+
+class MetricsCollector:
+ _instance = None
+
+ def __new__(cls, *args, **kwargs):
+ if cls._instance is None:
+ cls._instance = super().__new__(cls)
+ return cls._instance
+
+ def __init__(self, instance_name: str, persistence: BasePersistence):
+ if hasattr(self, "_initialized"):
+ return
+ start_http_server(METRICS_CLIENT_PORT)
+
+ self.instance_name = instance_name
+ self.persistence = persistence
+
+ self._metrics_definition()
+ self._load_from_persistence(self.persistence.load())
+
+ self._initialized = True
+
+ def _metrics_definition(self):
+ self.last_sync_record = Gauge("last_sync_record", "The last synced block number", ["instance"])
+
+        self.failure_batch_counter = Counter("failure_batch_counter", "Total number of failed index batches", ["instance"])
+
+ self.indexed_domains = Counter("indexed_domains", "Total number of indexed domains", ["instance", "domain"])
+
+ self.exported_domains = Counter(
+ "exported_domains", "Total number of exported domains", ["instance", "domain", "status"]
+ )
+
+ self.total_processing_duration = Gauge(
+ "total_processing_duration",
+ "Total time spent processing each block range in milliseconds",
+ ["instance"],
+ )
+
+ self.job_processing_duration = Gauge(
+ "job_processing_duration",
+ "Time spent in each sub-job processing block range in milliseconds",
+ ["instance", "job_name"],
+ )
+
+ self.export_domains_processing_duration = Gauge(
+ "export_domains_processing_duration",
+            "Time spent exporting each domain group per block range in milliseconds",
+ ["instance", "domains"],
+ )
+
+ self.instance_shutdown = Counter(
+ "instance_shutdown",
+            "Number of instance shutdowns, including manual stops and error-triggered exits",
+ ["instance"],
+ )
+
+ self.job_processing_retry = Counter(
+ "job_processing_retry",
+ "Retry times in sub-job processing block range",
+ ["instance", "job_name"],
+ )
+
+ def _load_from_persistence(self, metrics: dict):
+ for metric in metrics.keys():
+ if hasattr(self, metric):
+ collector = getattr(self, metric)
+ for indicator in metrics[metric]:
+ label = indicator["label"]
+
+ if type(collector) is Counter:
+ collector.labels(*label).inc(indicator["value"])
+ elif type(collector) is Gauge:
+ collector.labels(*label).set(indicator["value"])
+ else:
+ raise TypeError(f"Unsupported collector type: {type(collector)}")
+
+ def _wrap_metrics(self) -> dict:
+ metrics = defaultdict(list)
+
+        # Only restart-safe counters are persisted; iterate them uniformly.
+        for name in ("failure_batch_counter", "instance_shutdown", "job_processing_retry"):
+            collector = getattr(self, name)
+            for labels, value in collector._metrics.items():
+                metrics[name].append({"label": labels, "value": value._value.get()})
+
+ return metrics
+
+ def update_last_sync_record(self, last_sync_record: int):
+ last_record = self.last_sync_record.labels(instance=self.instance_name)._value.get()
+ if last_record < last_sync_record:
+ self.last_sync_record.labels(instance=self.instance_name).set(last_sync_record)
+
+ def update_failure_batch_counter(self):
+ self.failure_batch_counter.labels(instance=self.instance_name).inc(1)
+
+ def update_indexed_domains(self, domain: str, amount: int):
+ self.indexed_domains.labels(instance=self.instance_name, domain=domain).inc(amount)
+
+ def update_exported_domains(self, domain: str, status: str, amount: int):
+ self.exported_domains.labels(instance=self.instance_name, domain=domain, status=status).inc(amount)
+
+ def update_total_processing_duration(self, duration: int):
+ self.total_processing_duration.labels(instance=self.instance_name).set(duration)
+
+ def update_job_processing_duration(self, job_name: str, duration: int):
+ self.job_processing_duration.labels(instance=self.instance_name, job_name=job_name).set(duration)
+
+ def update_export_domains_processing_duration(self, domains: str, duration: int):
+ self.export_domains_processing_duration.labels(instance=self.instance_name, domains=domains).set(duration)
+
+ def update_instance_shutdown(self):
+ self.instance_shutdown.labels(instance=self.instance_name).inc(1)
+ self.persistence.save(self._wrap_metrics())
+
+ def update_job_processing_retry(self, job_name: str, retry: int):
+ self.job_processing_retry.labels(instance=self.instance_name, job_name=job_name).inc(retry)
+ self.persistence.save(self._wrap_metrics())
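A usage sketch for the new collector; the instance name and file path are illustrative, and a Prometheus scraper is assumed to poll METRICS_CLIENT_PORT (default 9200):

    from hemera.indexer.utils.metrics_collector import MetricsCollector
    from hemera.indexer.utils.metrics_persistence import FilePersistence

    # One collector per process; the singleton ignores repeated construction.
    collector = MetricsCollector("indexer-mainnet", FilePersistence("metrics.json"))
    collector.update_last_sync_record(19_000_000)
    collector.update_indexed_domains("Block", 120)
    collector.update_job_processing_duration("export_blocks_job", 850)
    # Counters that must survive restarts (shutdowns, retries) trigger a persistence save.
    collector.update_instance_shutdown()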
diff --git a/hemera/indexer/utils/metrics_persistence.py b/hemera/indexer/utils/metrics_persistence.py
new file mode 100644
index 000000000..3d22d7e88
--- /dev/null
+++ b/hemera/indexer/utils/metrics_persistence.py
@@ -0,0 +1,127 @@
+import json
+import logging
+import os
+from datetime import datetime, timezone
+
+from sqlalchemy import func
+from sqlalchemy.dialects.postgresql import insert
+
+from hemera.common.models.metrics_persistence import MetricsPersistence
+from hemera.common.utils.file_utils import write_to_file
+
+logger = logging.getLogger(__name__)
+
+
+class BasePersistence(object):
+
+ def __init__(self, instance_name):
+ self.instance_name = instance_name
+
+    def load(self) -> dict:
+        return {}
+
+ def save(self, metrics: dict):
+ pass
+
+ def init(self):
+ pass
+
+
+class PostgresPersistence(BasePersistence):
+
+ def __init__(self, instance_name, service):
+ super().__init__(instance_name)
+ self.service = service
+
+ def load(self):
+ session = self.service.get_service_session()
+ try:
+ metrics = (
+ session.query(MetricsPersistence.metrics)
+ .filter(MetricsPersistence.instance == self.instance_name)
+ .scalar()
+ )
+ except Exception as e:
+ raise e
+ finally:
+ session.close()
+ if metrics is not None:
+ return metrics
+ return {}
+
+ def save(self, metrics: dict):
+ session = self.service.get_service_session()
+ try:
+ conflict_args = {
+ "index_elements": [MetricsPersistence.instance],
+ "set_": {
+ "metrics": metrics,
+ "update_time": func.to_timestamp(int(datetime.now(timezone.utc).timestamp())),
+ },
+ }
+
+ statement = (
+ insert(MetricsPersistence)
+ .values(
+ {
+ "instance": self.instance_name,
+ "metrics": metrics,
+ }
+ )
+ .on_conflict_do_update(**conflict_args)
+ )
+ session.execute(statement)
+ session.commit()
+
+ except Exception as e:
+ raise e
+
+ finally:
+ session.close()
+
+ def init(self):
+ session = self.service.get_service_session()
+ try:
+ metrics = (
+ session.query(MetricsPersistence).filter(MetricsPersistence.instance == self.instance_name).first()
+ )
+
+ if metrics:
+ session.delete(metrics)
+ session.commit()
+ finally:
+ session.close()
+
+
+class FilePersistence(BasePersistence):
+
+ def __init__(self, instance_name):
+ super().__init__(instance_name)
+
+ def load(self):
+ if not os.path.isfile(self.instance_name):
+ return {}
+ with open(self.instance_name, "r") as json_file:
+ return json.load(json_file)
+
+ def save(self, metrics: dict):
+ write_to_file(self.instance_name, json.dumps(metrics))
+
+ def init(self):
+ if os.path.isfile(self.instance_name):
+ os.remove(self.instance_name)
+
+
+def init_persistence(instance_name: str, persistence_type: str, config: dict) -> BasePersistence:
+ if persistence_type == "postgres":
+ try:
+ service = config["db_service"]
+ except KeyError:
+            raise ValueError("postgres persistence requires a 'db_service' entry in config.")
+ return PostgresPersistence(instance_name, service)
+
+ elif persistence_type == "file":
+ return FilePersistence(instance_name)
+
+ else:
+ raise ValueError("Unable to determine persistence type: " + persistence_type)
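A sketch of the persistence factory in use; the file path is illustrative, and note that the JSON round-trip turns label tuples into lists:

    from hemera.indexer.utils.metrics_persistence import init_persistence

    # File-backed persistence: the instance name doubles as the JSON file path.
    store = init_persistence("metrics.json", "file", config={})
    store.save({"failure_batch_counter": [{"label": ("indexer-mainnet",), "value": 3}]})
    print(store.load())  # labels come back as lists after the JSON round-trip
    store.init()  # resets persisted metrics

    # Postgres-backed persistence requires a db_service entry in config:
    # store = init_persistence("indexer-mainnet", "postgres", config={"db_service": service})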
diff --git a/hemera/indexer/utils/multicall_hemera/__init__.py b/hemera/indexer/utils/multicall_hemera/__init__.py
new file mode 100644
index 000000000..d25850f1b
--- /dev/null
+++ b/hemera/indexer/utils/multicall_hemera/__init__.py
@@ -0,0 +1,6 @@
+from hemera.indexer.utils.multicall_hemera.call import Call
+from hemera.indexer.utils.multicall_hemera.multi_call import Multicall
+
+"""
+This package provides an API for the Multicall3 smart contract.
+"""
diff --git a/indexer/utils/multicall_hemera/abi.py b/hemera/indexer/utils/multicall_hemera/abi.py
similarity index 97%
rename from indexer/utils/multicall_hemera/abi.py
rename to hemera/indexer/utils/multicall_hemera/abi.py
index d0a3b1f46..981bb732a 100644
--- a/indexer/utils/multicall_hemera/abi.py
+++ b/hemera/indexer/utils/multicall_hemera/abi.py
@@ -5,7 +5,7 @@
# @File abi.py
# @Brief
-from common.utils.abi_code_utils import Function
+from hemera.common.utils.abi_code_utils import Function
TRY_BLOCK_AND_AGGREGATE_FUNC = Function(
{
diff --git a/indexer/utils/multicall_hemera/call.py b/hemera/indexer/utils/multicall_hemera/call.py
similarity index 90%
rename from indexer/utils/multicall_hemera/call.py
rename to hemera/indexer/utils/multicall_hemera/call.py
index b248c33a3..436232572 100644
--- a/indexer/utils/multicall_hemera/call.py
+++ b/hemera/indexer/utils/multicall_hemera/call.py
@@ -5,8 +5,8 @@
from eth_typing import Address, ChecksumAddress, HexAddress
from eth_utils import to_checksum_address
-from common.utils.abi_code_utils import Function
-from common.utils.format_utils import format_block_id
+from hemera.common.utils.abi_code_utils import Function
+from hemera.common.utils.format_utils import format_block_id
logger = logging.getLogger(__name__)
@@ -24,7 +24,8 @@ def __init__(
gas_limit: Optional[int] = None,
user_defined_k: Optional[Any] = None,
) -> None:
- self.target = to_checksum_address(target)
+ # self.target = to_checksum_address(target)
+ self.target = target
self.block_number = block_number
self.gas_limit = gas_limit
@@ -37,7 +38,7 @@ def __init__(
self._rpc_params = None
def __repr__(self) -> str:
- return f""
+ return f""
@property
def data(self) -> str:
diff --git a/indexer/utils/multicall_hemera/constants.py b/hemera/indexer/utils/multicall_hemera/constants.py
similarity index 88%
rename from indexer/utils/multicall_hemera/constants.py
rename to hemera/indexer/utils/multicall_hemera/constants.py
index 046219b13..4ffb07d40 100644
--- a/indexer/utils/multicall_hemera/constants.py
+++ b/hemera/indexer/utils/multicall_hemera/constants.py
@@ -4,11 +4,13 @@
# gas limit for a single call
GAS_LIMIT: int = int(os.environ.get("GAS_LIMIT", 5_000_000))
+# Some networks enforce a hard cap on call gas; if `MAX_GAS_LIMIT` is set (non-zero), it overrides `GAS_LIMIT`
+MAX_GAS_LIMIT: int = int(os.environ.get("MAX_GAS_LIMIT", 0))
# payload limit in KB
RPC_PAYLOAD_SIZE: int = int(os.environ.get("BATCH_SIZE", 250))
# calls limit
CALLS_LIMIT: int = int(os.environ.get("CALLS_LIMIT", 2000))
-DEFAULT_MULTICALL_ADDRESS = "0xcA11bde05977b3631167028862bE2a173976CA11"
+DEFAULT_MULTICALL_ADDRESS = os.environ.get("DEFAULT_MULTICALL_ADDRESS", "0xcA11bde05977b3631167028862bE2a173976CA11")
@dataclass(frozen=True)
@@ -54,7 +56,7 @@ def get_all_networks(cls) -> Dict[int, "NetworkConfig"]:
ARBITRUM_RINKEBY = NetworkConfig(421611, "ArbitrumRinkeby")
ARBITRUM_GOERLI = NetworkConfig(421613, "ArbitrumGoerli")
-BSC = NetworkConfig(56, "BSC")
+BSC = NetworkConfig(56, "BSC", 15921452)
BSC_TESTNET = NetworkConfig(97, "BSCTestnet")
POLYGON = NetworkConfig(137, "Polygon", 25770160)
@@ -105,6 +107,10 @@ def get_all_networks(cls) -> Dict[int, "NetworkConfig"]:
TAIKO_MAIN = NetworkConfig(167000, "Taiko", 11269)
CYBER_TESTNET = NetworkConfig(111557560, "CyberTestnet")
CYBER = NetworkConfig(7560, "Cyber", 3413302)
+STORY_ODYSSEY_TESTNET = NetworkConfig(1516, "StoryOdysseyTestnet", 14880)
+MONAD_TESTNET = NetworkConfig(10143, "MonadTestnet", 251449)
+SONIC = NetworkConfig(146, "Sonic", 60)
+SONIC_TESTNET = NetworkConfig(57054, "SonicTestnet", 1100)
def get_multicall_network(chain_id: int) -> NetworkConfig:
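The override rule the new MAX_GAS_LIMIT constant introduces (and which MultiCallHelper.construct_multicall_rpc applies below), restated as a minimal sketch:

    import os

    GAS_LIMIT = int(os.environ.get("GAS_LIMIT", 5_000_000))
    MAX_GAS_LIMIT = int(os.environ.get("MAX_GAS_LIMIT", 0))

    def effective_gas_limit(num_calls: int) -> int:
        # A non-zero MAX_GAS_LIMIT caps the whole multicall; otherwise the
        # limit scales linearly with the number of batched calls.
        return MAX_GAS_LIMIT if MAX_GAS_LIMIT != 0 else num_calls * GAS_LIMIT

    assert effective_gas_limit(3) == 15_000_000  # defaults, MAX_GAS_LIMIT unset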
diff --git a/indexer/utils/multicall_hemera/multi_call.py b/hemera/indexer/utils/multicall_hemera/multi_call.py
similarity index 74%
rename from indexer/utils/multicall_hemera/multi_call.py
rename to hemera/indexer/utils/multicall_hemera/multi_call.py
index c9f0914e0..ef0ecafbd 100644
--- a/indexer/utils/multicall_hemera/multi_call.py
+++ b/hemera/indexer/utils/multicall_hemera/multi_call.py
@@ -2,11 +2,10 @@
import orjson
-from common.utils.format_utils import format_block_id, hex_str_to_bytes
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.abi import AGGREGATE_FUNC, TRY_BLOCK_AND_AGGREGATE_FUNC
-from indexer.utils.multicall_hemera.constants import GAS_LIMIT, get_multicall_address, get_multicall_network
-from indexer.utils.multicall_hemera.util import calculate_execution_time
+from hemera.common.utils.format_utils import format_block_id, hex_str_to_bytes
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.abi import AGGREGATE_FUNC, TRY_BLOCK_AND_AGGREGATE_FUNC
+from hemera.indexer.utils.multicall_hemera.constants import GAS_LIMIT, get_multicall_address, get_multicall_network
class Multicall:
@@ -32,14 +31,14 @@ def __init__(
self.multicall_address = get_multicall_address(self.network)
self._parameters = None
- @calculate_execution_time
def to_rpc_param(self):
if self.require_success is True:
parameters = [[[call.target, hex_str_to_bytes(call.data)] for call in self.calls]]
else:
parameters = [self.require_success, [[call.target, hex_str_to_bytes(call.data)] for call in self.calls]]
- call_data = self.multicall_func.encode_function_call_data(parameters)
+ # call_data = self.multicall_func.encode_function_call_data(parameters)
+ call_data = self.multicall_func.encode_multicall_data(parameters)
args = [{"to": self.multicall_address, "data": call_data}, format_block_id(self.block_number)]
if self.gas_limit:
args[0]["gas"] = format_block_id(self.gas_limit)
diff --git a/indexer/utils/multicall_hemera/multi_call_helper.py b/hemera/indexer/utils/multicall_hemera/multi_call_helper.py
similarity index 85%
rename from indexer/utils/multicall_hemera/multi_call_helper.py
rename to hemera/indexer/utils/multicall_hemera/multi_call_helper.py
index e1c51ba4e..f55f47942 100644
--- a/indexer/utils/multicall_hemera/multi_call_helper.py
+++ b/hemera/indexer/utils/multicall_hemera/multi_call_helper.py
@@ -4,18 +4,23 @@
# @Author will
# @File multi_call_helper.py
# @Brief
+import json
import logging
import os
from collections import defaultdict
from typing import List
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.utils.multicall_hemera import Call, Multicall
-from indexer.utils.multicall_hemera.abi import TRY_BLOCK_AND_AGGREGATE_FUNC
-from indexer.utils.multicall_hemera.constants import CALLS_LIMIT, GAS_LIMIT, get_multicall_network
-from indexer.utils.multicall_hemera.util import calculate_execution_time, make_request_concurrent, rebatch_by_size
-from indexer.utils.provider import get_provider_from_uri
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.utils.multicall_hemera import Call, Multicall
+from hemera.indexer.utils.multicall_hemera.abi import TRY_BLOCK_AND_AGGREGATE_FUNC
+from hemera.indexer.utils.multicall_hemera.constants import CALLS_LIMIT, GAS_LIMIT, MAX_GAS_LIMIT, get_multicall_network
+from hemera.indexer.utils.multicall_hemera.util import (
+ calculate_execution_time,
+ make_request_concurrent,
+ rebatch_by_size,
+)
+from hemera.indexer.utils.provider import get_provider_from_uri
class MultiCallHelper:
@@ -134,32 +139,26 @@ def fetch_raw_calls(self, calls: List[Call]):
result = data.get("result")
try:
call.returns = call.decode_output(result)
+ if call.returns is None:
+ self.logger.error(f"multicall helper failed decode call: {call}, data {data}")
except Exception:
call.returns = None
- self.logger.warning(f"multicall helper failed call: {call}")
+ self.logger.error(f"multicall helper failed call: {call}, data {data}")
+ @calculate_execution_time
def construct_multicall_rpc(self, to_execute_multi_calls):
self.logger.info(f"Function total multicalls: {len(to_execute_multi_calls)}")
multicall_rpc = []
if to_execute_multi_calls:
for calls in to_execute_multi_calls:
+ self.logger.debug(f"{len(calls)} calls, at block_number {calls[0].block_number}")
multicall_rpc.append(
Multicall(
calls,
require_success=False,
chain_id=self.chain_id,
block_number=calls[0].block_number,
- gas_limit=(len(calls) * GAS_LIMIT),
+ gas_limit=MAX_GAS_LIMIT if MAX_GAS_LIMIT != 0 else (len(calls) * GAS_LIMIT),
).to_rpc_param()
)
return multicall_rpc
-
- def _construct_single_multicall_rpc(self, calls):
- multicall = Multicall(
- calls,
- require_success=False,
- chain_id=self.chain_id,
- block_number=calls[0].block_number,
- gas_limit=(len(calls) * GAS_LIMIT),
- )
- return multicall.to_rpc_param()
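construct_multicall_rpc assumes every batch it receives shares one block number (it reads calls[0].block_number); a self-contained sketch of that grouping invariant, with a hypothetical FakeCall stand-in:

    from collections import defaultdict, namedtuple

    FakeCall = namedtuple("FakeCall", ["target", "block_number"])  # hypothetical stand-in

    def group_calls_by_block(calls):
        # Calls executed at the same block height can share one multicall payload.
        grouped = defaultdict(list)
        for call in calls:
            grouped[call.block_number].append(call)
        return list(grouped.values())

    batches = group_calls_by_block(
        [FakeCall("0xaaa", 100), FakeCall("0xbbb", 100), FakeCall("0xccc", 101)]
    )
    assert [len(batch) for batch in batches] == [2, 1]  # one multicall per block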
diff --git a/indexer/utils/multicall_hemera/util.py b/hemera/indexer/utils/multicall_hemera/util.py
similarity index 50%
rename from indexer/utils/multicall_hemera/util.py
rename to hemera/indexer/utils/multicall_hemera/util.py
index c4bd15e45..3dcf48406 100644
--- a/indexer/utils/multicall_hemera/util.py
+++ b/hemera/indexer/utils/multicall_hemera/util.py
@@ -2,9 +2,10 @@
# -*- coding: utf-8 -*-
# @Time 2024/8/20 14:13
# @Author will
-# @File util.py.py
+# @File util.py
# @Brief
import atexit
+import json
import logging
import os
import threading
@@ -12,12 +13,17 @@
from concurrent.futures import ThreadPoolExecutor, as_completed
import orjson
+from requests import RequestException
+from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential
-from indexer.utils.multicall_hemera.constants import RPC_PAYLOAD_SIZE
+from hemera.indexer.utils.multicall_hemera.constants import RPC_PAYLOAD_SIZE
logger = logging.getLogger(__name__)
+JOB_RETRIES = int(os.environ.get("JOB_RETRIES", "5"))
+
+
def calculate_execution_time(func):
def wrapper(*args, **kwargs):
start_time = time.time()
@@ -87,19 +93,64 @@ def shutdown(cls):
cls._instance.shutdown(wait=False)
cls._instance = None
+    @classmethod
+    def check_results(cls, results):
+        if results:
+            for result in results:
+                if isinstance(result, dict) and "error" in result:
+                    error = result["error"]
+                    if error.get("code") == 429:
+                        # raise it to retry
+                        raise Exception(f"Rate limit error: {error.get('message')}")
+                    # e.g. {'error': {'code': -32000, 'message': 'out of gas'}}
+                    # e.g. {'error': {'code': -32000, 'message': 'execution reverted'}}
+                    elif "out of gas" in error.get("message", ""):
+                        # out-of-gas errors are logged but do not trigger a retry
+                        logger.error(f"rpc error: {json.dumps(result)}")
+        return results
+
@classmethod
def submit_tasks(cls, func, chunks, max_workers=None):
executor = cls.get_instance(max_workers)
results = [None] * len(chunks)
- try:
- future_to_chunk = {executor.submit(func, chunk[0], i): i for i, chunk in enumerate(chunks)}
-
- for future in as_completed(future_to_chunk):
- index, result = future.result(timeout=30)
- results[index] = result
- except Exception as e:
- logger.error(f"ThreadPoolManager.submit_tasks error: {e}")
- raise e
+ pending_tasks = {i: chunk for i, chunk in enumerate(chunks)}
+ last_time_tasks = len(pending_tasks)
+ attempt = 0
+ max_attempts = JOB_RETRIES
+ min_wait = 1
+ max_wait = 2 ** (JOB_RETRIES - 1)
+
+ while pending_tasks and attempt < max_attempts:
+ futures = {executor.submit(func, chunk[0], i): i for i, chunk in pending_tasks.items()}
+ pending_tasks.clear()
+
+            for future in as_completed(futures):
+                index = futures[future]
+                try:
+                    _, result = future.result(timeout=30)
+                    cls.check_results(result)
+                    results[index] = result
+                except Exception as e:
+                    logger.debug(f"Task {index} failed with error: {e}")
+                    pending_tasks[index] = chunks[index]
+
+ if pending_tasks:
+ if len(pending_tasks) < last_time_tasks:
+                    # at least one task succeeded; retry the remainder immediately
+ delay = 0
+ else:
+ delay = min(min_wait * (2**attempt), max_wait)
+ attempt += 1
+ last_time_tasks = len(pending_tasks)
+ logger.info(f"Retrying {len(pending_tasks)} failed tasks in {delay} seconds...")
+ time.sleep(delay)
+
+ if pending_tasks:
+ logger.error(f"Some tasks failed after {max_attempts} retries: {list(pending_tasks.keys())}")
+ raise Exception(f"Some tasks failed after {max_attempts} retries: {list(pending_tasks.keys())}")
return results
diff --git a/indexer/utils/parameter_utils.py b/hemera/indexer/utils/parameter_utils.py
similarity index 90%
rename from indexer/utils/parameter_utils.py
rename to hemera/indexer/utils/parameter_utils.py
index b5213dc16..e3fdebf29 100644
--- a/indexer/utils/parameter_utils.py
+++ b/hemera/indexer/utils/parameter_utils.py
@@ -4,9 +4,13 @@
import click
-from common.utils.format_utils import to_snake_case
-from indexer.domain import Domain
-from indexer.exporters.item_exporter import ItemExporterType, check_exporter_in_chosen
+from hemera.common.utils.format_utils import to_snake_case
+from hemera.indexer.domains import Domain
+from hemera.indexer.exporters.item_exporter import ItemExporterType, check_exporter_in_chosen
+
+
+def default_if_none(value, default):
+ return default if value is None else value
def extract_path_from_parameter(cli_path: str) -> str:
diff --git a/indexer/utils/progress_logger.py b/hemera/indexer/utils/progress_logger.py
similarity index 97%
rename from indexer/utils/progress_logger.py
rename to hemera/indexer/utils/progress_logger.py
index 182a570c5..f181f253d 100644
--- a/indexer/utils/progress_logger.py
+++ b/hemera/indexer/utils/progress_logger.py
@@ -3,7 +3,7 @@
from tqdm import tqdm
-from indexer.utils.atomic_counter import AtomicCounter
+from hemera.indexer.utils.atomic_counter import AtomicCounter
class TqdmExtraFormat(tqdm):
diff --git a/indexer/utils/provider.py b/hemera/indexer/utils/provider.py
similarity index 100%
rename from indexer/utils/provider.py
rename to hemera/indexer/utils/provider.py
diff --git a/indexer/utils/reorg.py b/hemera/indexer/utils/reorg.py
similarity index 57%
rename from indexer/utils/reorg.py
rename to hemera/indexer/utils/reorg.py
index d84e22baf..f6e8f6f13 100644
--- a/indexer/utils/reorg.py
+++ b/hemera/indexer/utils/reorg.py
@@ -1,15 +1,18 @@
import logging
from datetime import datetime, timezone
-from sqlalchemy import and_
+from sqlalchemy import and_, func, insert, literal, select
-from common.converter.pg_converter import domain_model_mapping
-from common.models import HemeraModel
-from common.services.postgresql_service import PostgreSQLService
-from common.utils.exception_control import RetriableError
+from hemera.common.models import HemeraModel
+from hemera.common.models.blocks import Blocks
+from hemera.common.models.fix_record import FixRecord
+from hemera.common.services.postgresql_service import PostgreSQLService
+from hemera.common.utils.exception_control import RetriableError
def set_reorg_sign(jobs, block_number, service):
+ from hemera.common.converter.pg_converter import domain_model_mapping
+
conn = service.get_conn()
cur = conn.cursor()
try:
@@ -67,3 +70,39 @@ def should_reorg(block_number: int, table: HemeraModel, service: PostgreSQLServi
finally:
session.close()
return result is not None
+
+
+def check_reorg(service: PostgreSQLService, check_range: int = None):
+ check_where = and_(Blocks.reorg == False, Blocks.number >= check_range) if check_range else Blocks.reorg == False
+
+ inner_query = (
+ select(
+ Blocks.number,
+ Blocks.hash,
+ Blocks.parent_hash,
+ func.lag(Blocks.number, 1).over(order_by=Blocks.number).label("parent_number"),
+ func.lag(Blocks.hash, 1).over(order_by=Blocks.number).label("lag_hash"),
+ )
+ .where(check_where)
+ .alias("align_table")
+ )
+
+ select_stmt = select(
+ inner_query.c.number.label("start_block_number"),
+ (inner_query.c.number + 1).label("last_fixed_block_number"),
+ literal(5).label("remain_process"),
+ literal("submitted").label("job_status"),
+ ).where(
+ and_(
+ inner_query.c.parent_hash != inner_query.c.lag_hash, inner_query.c.number == inner_query.c.parent_number + 1
+ )
+ )
+
+ insert_stmt = insert(FixRecord).from_select(
+ ["start_block_number", "last_fixed_block_number", "remain_process", "job_status"], select_stmt
+ )
+
+ db_session = service.get_service_session()
+ db_session.execute(insert_stmt)
+ db_session.commit()
+ db_session.close()
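check_reorg expresses a hash-chain invariant with lag() window functions: for consecutive block numbers, each block's parent_hash must equal the previous block's hash. The same check, restated as a self-contained sketch:

    def find_reorg_points(blocks):
        # blocks: (number, hash, parent_hash) tuples sorted by number, reorg rows excluded.
        points = []
        for (prev_number, prev_hash, _), (number, _, parent_hash) in zip(blocks, blocks[1:]):
            if number == prev_number + 1 and parent_hash != prev_hash:
                points.append(number)  # chain breaks here: schedule a fix_record job
        return points

    chain = [(100, "0xaa", "0x99"), (101, "0xbb", "0xaa"), (102, "0xcc", "0xff")]
    assert find_reorg_points(chain) == [102]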
diff --git a/indexer/utils/rpc_utils.py b/hemera/indexer/utils/rpc_utils.py
similarity index 80%
rename from indexer/utils/rpc_utils.py
rename to hemera/indexer/utils/rpc_utils.py
index 80ebd7802..526890d39 100644
--- a/indexer/utils/rpc_utils.py
+++ b/hemera/indexer/utils/rpc_utils.py
@@ -1,6 +1,9 @@
+import logging
import random
-from common.utils.exception_control import RetriableError, decode_response_error
+from hemera.common.utils.exception_control import RetriableError, decode_response_error
+
+logger = logging.getLogger(__name__)
# TODO: Implement fallback mechanism for provider uris instead of picking randomly
@@ -10,11 +13,15 @@ def pick_random_provider_uri(provider_uri):
def rpc_response_batch_to_results(response):
+    if not isinstance(response, list):
+ response = [response]
for response_item in response:
yield rpc_response_to_result(response_item)
def rpc_response_to_result(response):
+    if not isinstance(response, dict):
+ return None
result = response.get("result")
if result is None:
error_message = "result is None in response {}.".format(response)
@@ -24,6 +31,7 @@ def rpc_response_to_result(response):
# synced node
raise RetriableError(error_message)
elif response.get("error") is not None:
+ logger.error(f"rpc error response: {response}")
return decode_response_error(response.get("error"))
else:
return result
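The isinstance guards added above normalize the two shapes a JSON-RPC response can take, so downstream code can always iterate; in effect:

    def normalize_batch(response):
        # A single (non-batched) JSON-RPC response arrives as a dict; wrap it.
        if not isinstance(response, list):
            response = [response]
        return response

    assert normalize_batch({"id": 0, "result": "0x1"}) == [{"id": 0, "result": "0x1"}]
    assert len(normalize_batch([{"id": 0}, {"id": 1}])) == 2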
diff --git a/hemera/indexer/utils/sync_recorder.py b/hemera/indexer/utils/sync_recorder.py
new file mode 100644
index 000000000..26f4621f2
--- /dev/null
+++ b/hemera/indexer/utils/sync_recorder.py
@@ -0,0 +1,175 @@
+import json
+import logging
+import os
+from datetime import datetime, timezone
+from distutils.util import strtobool
+
+from sqlalchemy import func
+from sqlalchemy.dialects.postgresql import insert
+
+from hemera.common.models.failure_records import FailureRecords
+from hemera.common.models.sync_record import SyncRecord
+from hemera.common.utils.file_utils import smart_open, write_to_file
+
+logger = logging.getLogger(__name__)
+
+ASYNC_SUBMIT = bool(strtobool(os.environ.get("ASYNC_SUBMIT", "false")))
+
+
+class BaseRecorder(object):
+
+ def __init__(self, multi_mode: bool):
+ self.multi_mode = multi_mode
+
+ def set_last_synced_block(self, last_synced_block):
+ pass
+
+ def get_last_synced_block(self):
+ pass
+
+ def set_failure_record(self, output_types, start_block, end_block, exception_stage, exception):
+ pass
+
+ def handle_success(self, last_block_number):
+ self.set_last_synced_block(last_block_number)
+ logger.info("Writing last synced block {}".format(last_block_number))
+
+
+class FileSyncRecorder(BaseRecorder):
+
+ def __init__(self, file_name, multi_mode):
+ super().__init__(multi_mode)
+ self.file_name = file_name
+
+ def set_last_synced_block(self, last_synced_block):
+ if ASYNC_SUBMIT or self.multi_mode:
+ wrote_synced_block = self.get_last_synced_block()
+ if wrote_synced_block < last_synced_block:
+ write_to_file(self.file_name, str(last_synced_block) + "\n")
+ else:
+ write_to_file(self.file_name, str(last_synced_block) + "\n")
+
+ def get_last_synced_block(self):
+ if not os.path.isfile(self.file_name):
+ return 0
+ with smart_open(self.file_name, "r") as last_synced_block_file:
+ last_synced_block = last_synced_block_file.read()
+ try:
+ last_synced_block = int(last_synced_block)
+        except ValueError:
+ last_synced_block = 0
+ return last_synced_block
+
+ def set_failure_record(self, output_types, start_block, end_block, exception_stage, exception):
+ failure_file = self.file_name + "_failure_records"
+ crash_time = int(datetime.now(timezone.utc).timestamp())
+ content = {
+ "output_types": ",".join(output_types),
+ "start_block_number": start_block,
+ "end_block_number": end_block,
+ "exception_stage": exception_stage,
+ "exception": exception,
+ "crash_time": crash_time,
+ }
+
+ write_to_file(failure_file, json.dumps(content) + "\n", "a+")
+
+
+class PGSyncRecorder(BaseRecorder):
+
+ def __init__(self, key, service, multi_mode):
+ super().__init__(multi_mode)
+ self.key = key
+ self.service = service
+
+ def set_last_synced_block(self, last_synced_block):
+ session = self.service.get_service_session()
+ update_time = func.to_timestamp(int(datetime.now(timezone.utc).timestamp()))
+ try:
+ conflict_args = {
+ "index_elements": [SyncRecord.mission_sign],
+ "set_": {
+ "last_block_number": last_synced_block,
+ "update_time": update_time,
+ },
+ }
+
+ if ASYNC_SUBMIT or self.multi_mode:
+ conflict_args["where"] = SyncRecord.last_block_number <= last_synced_block
+
+ statement = (
+ insert(SyncRecord)
+ .values(
+ {
+ "mission_sign": self.key,
+ "last_block_number": last_synced_block,
+ "update_time": update_time,
+ }
+ )
+ .on_conflict_do_update(**conflict_args)
+ )
+ session.execute(statement)
+ session.commit()
+ except Exception as e:
+ raise e
+ finally:
+ session.close()
+
+ def get_last_synced_block(self):
+ session = self.service.get_service_session()
+ try:
+ result = session.query(SyncRecord.last_block_number).filter(SyncRecord.mission_sign == self.key).scalar()
+ except Exception as e:
+ raise e
+ finally:
+ session.close()
+ if result is not None:
+ return result
+ return 0
+
+ def set_failure_record(self, output_types, start_block, end_block, exception_stage, exception):
+ session = self.service.get_service_session()
+ try:
+ crash_time = func.to_timestamp(int(datetime.now(timezone.utc).timestamp()))
+
+ statement = insert(FailureRecords).values(
+ {
+ "mission_sign": self.key,
+ "output_types": ",".join(output_types),
+ "start_block_number": start_block,
+ "end_block_number": end_block,
+ "exception_stage": exception_stage,
+ "exception": exception,
+ "crash_time": crash_time,
+ }
+ )
+
+ session.execute(statement)
+ session.commit()
+
+ except Exception as e:
+ raise e
+
+ finally:
+ session.close()
+
+
+def create_recorder(sync_recorder: str, config: dict, multi_mode: bool) -> BaseRecorder:
+    if ":" not in sync_recorder:
+        raise ValueError(f"Invalid sync recorder: {sync_recorder}")
+
+    recorder = sync_recorder.split(":", 1)
+
+ if recorder[0] == "pg":
+ try:
+ service = config["db_service"]
+ except KeyError:
+            raise ValueError("postgres sync recorder requires a 'db_service' entry in config.")
+ return PGSyncRecorder(recorder[1], service, multi_mode)
+
+ elif recorder[0] == "file":
+ return FileSyncRecorder(recorder[1], multi_mode)
+
+ else:
+ raise ValueError("Unable to determine sync recorder type: " + sync_recorder)
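A usage sketch for the recorder factory; the "file:" path and "pg:" mission sign are illustrative:

    from hemera.indexer.utils.sync_recorder import create_recorder

    # File-backed recorder: everything after "file:" is the sync-record file path.
    recorder = create_recorder("file:sync_record", config={}, multi_mode=False)
    recorder.handle_success(19_000_000)
    assert recorder.get_last_synced_block() == 19_000_000

    # Postgres-backed recorder: needs a db_service entry in config.
    # recorder = create_recorder("pg:indexer-mainnet", config={"db_service": service}, multi_mode=False)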
diff --git a/hemera/indexer/utils/template_generator.py b/hemera/indexer/utils/template_generator.py
new file mode 100644
index 000000000..37020f505
--- /dev/null
+++ b/hemera/indexer/utils/template_generator.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Time : 2024/12/3 5:35 PM
+Author : xuzh
+Project : hemera_indexer
+"""
+from pathlib import Path
+
+
+class TemplateGenerator:
+ def __init__(self, template_file: str = None):
+ if template_file:
+ with open(Path(template_file), "r", encoding="utf-8") as f:
+ self.template_file = f.read()
+ else:
+ self.template_file = ""
+ self.replacements = {}
+
+    def add_replacements(self, key: str, value: str):
+ self.replacements[key] = value
+
+ def generate_file(self, target_path: str) -> None:
+        generate_content = self.template_file  # str is immutable; no copy needed
+ for key, value in self.replacements.items():
+ generate_content = generate_content.replace(key, value)
+
+ target_path = Path(target_path)
+ target_path.parent.mkdir(parents=True, exist_ok=True)
+
+ with open(target_path, "w", encoding="utf-8") as f:
+ f.write(generate_content)
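A usage sketch for the generator; the template content, placeholder syntax, and paths are illustrative:

    from pathlib import Path

    from hemera.indexer.utils.template_generator import TemplateGenerator

    Path("job.tmpl").write_text("class {{job_name}}(BaseJob):\n    pass\n", encoding="utf-8")

    generator = TemplateGenerator("job.tmpl")
    generator.add_replacements("{{job_name}}", "ExportFooBarJob")
    generator.generate_file("generated/export_foo_bar_job.py")
    print(Path("generated/export_foo_bar_job.py").read_text(encoding="utf-8"))
    # class ExportFooBarJob(BaseJob):
    #     pass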
diff --git a/indexer/utils/thread_local_proxy.py b/hemera/indexer/utils/thread_local_proxy.py
similarity index 100%
rename from indexer/utils/thread_local_proxy.py
rename to hemera/indexer/utils/thread_local_proxy.py
diff --git a/indexer/utils/token_fetcher.py b/hemera/indexer/utils/token_fetcher.py
similarity index 93%
rename from indexer/utils/token_fetcher.py
rename to hemera/indexer/utils/token_fetcher.py
index 07547885d..66be69667 100644
--- a/indexer/utils/token_fetcher.py
+++ b/hemera/indexer/utils/token_fetcher.py
@@ -7,17 +7,17 @@
import logging
-from common.utils.format_utils import to_snake_case
-from enumeration.record_level import RecordLevel
-from enumeration.token_type import TokenType
-from indexer.domain.token_id_infos import (
+from hemera.common.enumeration.record_level import RecordLevel
+from hemera.common.enumeration.token_type import TokenType
+from hemera.common.utils.format_utils import to_snake_case
+from hemera.indexer.domains.token_id_infos import (
ERC721TokenIdChange,
ERC721TokenIdDetail,
ERC1155TokenIdDetail,
UpdateERC721TokenIdDetail,
UpdateERC1155TokenIdDetail,
)
-from indexer.utils.abi_setting import (
+from hemera.indexer.utils.abi_setting import (
ERC20_BALANCE_OF_FUNCTION,
ERC721_OWNER_OF_FUNCTION,
ERC721_TOKEN_URI_FUNCTION,
@@ -25,10 +25,10 @@
ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION,
TOKEN_TOTAL_SUPPLY_WITH_ID_FUNCTION,
)
-from indexer.utils.exception_recorder import ExceptionRecorder
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
-from indexer.utils.multicall_hemera.util import calculate_execution_time
+from hemera.indexer.utils.exception_recorder import ExceptionRecorder
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera.indexer.utils.multicall_hemera.util import calculate_execution_time
exception_recorder = ExceptionRecorder()
diff --git a/hemera/main.py b/hemera/main.py
new file mode 100644
index 000000000..4250e3b50
--- /dev/null
+++ b/hemera/main.py
@@ -0,0 +1,3 @@
+from hemera.cli import cli
+
+if __name__ == "__main__":
+    cli()
diff --git a/indexer/modules/bridge/bedrock/parser/__init__.py b/hemera/migrations/__init__.py
similarity index 100%
rename from indexer/modules/bridge/bedrock/parser/__init__.py
rename to hemera/migrations/__init__.py
diff --git a/migrations/env.py b/hemera/migrations/env.py
similarity index 85%
rename from migrations/env.py
rename to hemera/migrations/env.py
index 65979e21a..4383b6d54 100644
--- a/migrations/env.py
+++ b/hemera/migrations/env.py
@@ -6,10 +6,12 @@
from sqlalchemy import engine_from_config, pool
from sqlalchemy.sql.schema import SchemaItem
-from common.models import db, import_all_models
+from hemera.common.models import db
+from hemera.common.utils.module_loading import import_submodules
# Make sure everything is imported so that alembic can find it all
-import_all_models()
+# import_all_models()
+import_submodules("hemera.common.models")
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -31,30 +33,8 @@
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
-IGNORE_DB_TABLE = [
- "transactions_multi",
- "hemera_address_transactions",
- "address_transactions_all",
- "chosen_address",
- "af_ether_fi_share_balances",
- "af_ether_fi_position_values",
- "af_ether_fi_share_balances_current",
- "af_ether_fi_lrt_exchange_rate",
- "af_lido_seth_share_balances",
- "af_lido_position_values",
- "af_lido_seth_share_balances_current",
- "af_pendle_pool",
- "af_pendle_user_active_balance",
- "af_pendle_user_active_balance_current",
-]
-PARTITION_TABLES = [
- "contract_internal_transactions",
- "erc20_token_transfers",
- "erc721_token_transfers",
- "logs",
- "traces",
- "transactions",
-]
+IGNORE_DB_TABLE = []
+PARTITION_TABLES = []
def table_able_to_track(**kwargs) -> bool:
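import_submodules replaces the old import_all_models() call so that every model module registers itself with the shared metadata before Alembic autogenerate runs. A sketch of what such a helper typically does (the project's actual implementation in hemera.common.utils.module_loading may differ):

    import importlib
    import pkgutil

    def import_submodules(package_name: str) -> None:
        # Recursively import every module under the package for its side effects
        # (SQLAlchemy model registration).
        package = importlib.import_module(package_name)
        for _, name, _ in pkgutil.walk_packages(package.__path__, package_name + "."):
            importlib.import_module(name)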
diff --git a/hemera/migrations/isolate/20241105_add_address_index_and_stats.py b/hemera/migrations/isolate/20241105_add_address_index_and_stats.py
new file mode 100644
index 000000000..76159f6a6
--- /dev/null
+++ b/hemera/migrations/isolate/20241105_add_address_index_and_stats.py
@@ -0,0 +1,543 @@
+"""add_address_index_and_stats
+
+Revision ID: 872094559593
+Revises: bc23aa19668e
+Create Date: 2024-11-05 13:34:30.692977
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision: str = "872094559593"
+down_revision: Union[str, None] = "bc23aa19668e"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "address_contract_operations",
+ sa.Column("address", postgresql.BYTEA(), nullable=False),
+ sa.Column("trace_from_address", postgresql.BYTEA(), nullable=True),
+ sa.Column("contract_address", postgresql.BYTEA(), nullable=True),
+ sa.Column("trace_id", sa.TEXT(), nullable=False),
+ sa.Column("block_number", sa.INTEGER(), nullable=False),
+ sa.Column("transaction_index", sa.INTEGER(), nullable=False),
+ sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True),
+ sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False),
+ sa.Column("block_hash", postgresql.BYTEA(), nullable=True),
+ sa.Column("error", sa.TEXT(), nullable=True),
+ sa.Column("status", sa.INTEGER(), nullable=True),
+ sa.Column("creation_code", postgresql.BYTEA(), nullable=True),
+ sa.Column("deployed_code", postgresql.BYTEA(), nullable=True),
+ sa.Column("gas", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("trace_type", sa.TEXT(), nullable=True),
+ sa.Column("call_type", sa.TEXT(), nullable=True),
+ sa.Column("transaction_receipt_status", sa.INTEGER(), nullable=True),
+ sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.PrimaryKeyConstraint("address", "trace_id", "block_number", "transaction_index", "block_timestamp"),
+ )
+ op.create_index(
+ "address_contract_operations_address_block_tn_t_idx",
+ "address_contract_operations",
+ ["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")],
+ unique=False,
+ )
+ op.create_table(
+ "address_internal_transactions",
+ sa.Column("address", postgresql.BYTEA(), nullable=False),
+ sa.Column("trace_id", sa.TEXT(), nullable=False),
+ sa.Column("block_number", sa.INTEGER(), nullable=False),
+ sa.Column("transaction_index", sa.INTEGER(), nullable=False),
+ sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True),
+ sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False),
+ sa.Column("block_hash", postgresql.BYTEA(), nullable=True),
+ sa.Column("error", sa.TEXT(), nullable=True),
+ sa.Column("status", sa.INTEGER(), nullable=True),
+ sa.Column("input_method", sa.TEXT(), nullable=True),
+ sa.Column("value", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("gas", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("trace_type", sa.TEXT(), nullable=True),
+ sa.Column("call_type", sa.TEXT(), nullable=True),
+ sa.Column("txn_type", sa.SMALLINT(), nullable=True),
+ sa.Column("related_address", postgresql.BYTEA(), nullable=True),
+ sa.Column("transaction_receipt_status", sa.INTEGER(), nullable=True),
+ sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.PrimaryKeyConstraint("address", "trace_id", "block_number", "transaction_index", "block_timestamp"),
+ )
+ op.create_index(
+ "address_internal_transactions_address_nt_t_idx",
+ "address_internal_transactions",
+ ["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")],
+ unique=False,
+ )
+ op.create_table(
+ "af_erc1155_token_holdings_current",
+ sa.Column("position_token_address", postgresql.BYTEA(), nullable=False),
+ sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False),
+ sa.Column("wallet_address", postgresql.BYTEA(), nullable=False),
+ sa.Column("block_number", sa.BIGINT(), nullable=True),
+ sa.Column("block_timestamp", sa.BIGINT(), nullable=True),
+ sa.Column("balance", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.PrimaryKeyConstraint("position_token_address", "token_id", "wallet_address"),
+ )
+ op.create_index(
+ "af_erc1155_token_holdings_current_token_block_desc_index",
+ "af_erc1155_token_holdings_current",
+ [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")],
+ unique=False,
+ )
+ op.create_index(
+ "af_erc1155_token_holdings_current_wallet_block_desc_index",
+ "af_erc1155_token_holdings_current",
+ [sa.text("wallet_address DESC"), sa.text("block_timestamp DESC")],
+ unique=False,
+ )
+ op.create_table(
+ "af_erc1155_token_holdings_hist",
+ sa.Column("position_token_address", postgresql.BYTEA(), nullable=False),
+ sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False),
+ sa.Column("wallet_address", postgresql.BYTEA(), nullable=False),
+ sa.Column("balance", sa.NUMERIC(precision=100), nullable=True),
+ sa.Column("block_number", sa.BIGINT(), nullable=False),
+ sa.Column("block_timestamp", sa.BIGINT(), nullable=False),
+ sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True),
+ sa.Column("reorg", sa.BOOLEAN(), nullable=True),
+ sa.PrimaryKeyConstraint(
+ "position_token_address", "token_id", "wallet_address", "block_timestamp", "block_number"
+ ),
+ )
+ op.create_index(
+ "feature_erc1155_token_holding_token_block_desc_index",
+ "af_erc1155_token_holdings_hist",
+ [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")],
+ unique=False,
+ )
+ op.create_index(
+ "feature_erc1155_token_holding_token_wallet_block_desc_index",
+ "af_erc1155_token_holdings_hist",
+ [sa.text("position_token_address DESC"), sa.text("wallet_address DESC"), sa.text("block_number DESC")],
+ unique=False,
+ )
+ op.create_table(
+ "af_index_daily_stats",
+ sa.Column("address", postgresql.BYTEA(), nullable=False),
+ sa.Column("block_date", sa.DATE(), nullable=False),
+ sa.Column("transaction_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_in_value", sa.BIGINT(), nullable=True),
+ sa.Column("transaction_out_value", sa.BIGINT(), nullable=True),
+ sa.Column("transaction_self_value", sa.BIGINT(), nullable=True),
+ sa.Column("transaction_in_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_out_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_self_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("internal_transaction_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_in_value", sa.BIGINT(), nullable=True),
+ sa.Column("internal_transaction_out_value", sa.BIGINT(), nullable=True),
+ sa.Column("internal_transaction_self_value", sa.BIGINT(), nullable=True),
+ sa.Column("erc20_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("contract_creation_count", sa.INTEGER(), nullable=True),
+ sa.Column("contract_destruction_count", sa.INTEGER(), nullable=True),
+ sa.Column("contract_operation_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_count", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_count", sa.INTEGER(), nullable=True),
+ sa.PrimaryKeyConstraint("address", "block_date"),
+ )
+ op.create_table(
+ "af_index_na_scheduled_metadata",
+ sa.Column("id", sa.INTEGER(), nullable=False),
+ sa.Column("dag_id", sa.VARCHAR(), nullable=True),
+ sa.Column("execution_date", postgresql.TIMESTAMP(), nullable=True),
+ sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), nullable=True),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_table(
+ "af_index_stats",
+ sa.Column("address", postgresql.BYTEA(), nullable=False),
+ sa.Column("transaction_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_in_value", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_out_value", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_self_value", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_in_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_out_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("transaction_self_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("internal_transaction_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_in_value", sa.NUMERIC(), nullable=True),
+ sa.Column("internal_transaction_out_value", sa.NUMERIC(), nullable=True),
+ sa.Column("internal_transaction_self_value", sa.NUMERIC(), nullable=True),
+ sa.Column("erc20_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_in_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_out_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_self_count", sa.INTEGER(), nullable=True),
+ sa.Column("contract_creation_count", sa.INTEGER(), nullable=True),
+ sa.Column("contract_destruction_count", sa.INTEGER(), nullable=True),
+ sa.Column("contract_operation_count", sa.INTEGER(), nullable=True),
+ sa.Column("transaction_count", sa.INTEGER(), nullable=True),
+ sa.Column("internal_transaction_count", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_721_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("nft_1155_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("tag", sa.VARCHAR(), nullable=True),
+ sa.PrimaryKeyConstraint("address"),
+ )
+ op.create_table(
+ "af_index_token_address_daily_stats",
+ sa.Column("address", postgresql.BYTEA(), nullable=False),
+ sa.Column("token_holder_count", sa.INTEGER(), nullable=True),
+ sa.Column("token_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("update_time", postgresql.TIMESTAMP(), nullable=True),
+ sa.PrimaryKeyConstraint("address"),
+ )
+ op.create_table(
+ "af_index_token_address_stats",
+ sa.Column("address", postgresql.BYTEA(), nullable=False),
+ sa.Column("token_holder_count", sa.INTEGER(), nullable=True),
+ sa.Column("token_transfer_count", sa.INTEGER(), nullable=True),
+ sa.Column("update_time", postgresql.TIMESTAMP(), nullable=True),
+ sa.PrimaryKeyConstraint("address"),
+ )
+ op.create_table(
+ "af_stats_na_daily_addresses",
+ sa.Column("block_date", sa.DATE(), nullable=False),
+ sa.Column("active_address_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("receiver_address_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("sender_address_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("total_address_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("new_address_cnt", sa.BIGINT(), nullable=True),
+ sa.PrimaryKeyConstraint("block_date"),
+ )
+ op.create_table(
+ "af_stats_na_daily_blocks",
+ sa.Column("block_date", sa.DATE(), nullable=False),
+ sa.Column("cnt", sa.BIGINT(), nullable=True),
+ sa.Column("avg_size", sa.NUMERIC(), nullable=True),
+ sa.Column("avg_gas_limit", sa.NUMERIC(), nullable=True),
+ sa.Column("avg_gas_used", sa.NUMERIC(), nullable=True),
+ sa.Column("total_gas_used", sa.BIGINT(), nullable=True),
+ sa.Column("avg_gas_used_percentage", sa.NUMERIC(), nullable=True),
+ sa.Column("avg_txn_cnt", sa.NUMERIC(), nullable=True),
+ sa.Column("total_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("block_interval", sa.NUMERIC(), nullable=True),
+ sa.PrimaryKeyConstraint("block_date"),
+ )
+ op.create_table(
+ "af_stats_na_daily_bridge_transactions",
+ sa.Column("block_date", sa.DATE(), nullable=False),
+ sa.Column("deposit_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("withdraw_cnt", sa.BIGINT(), nullable=True),
+ sa.PrimaryKeyConstraint("block_date"),
+ )
+ op.create_table(
+ "af_stats_na_daily_tokens",
+ sa.Column("block_date", sa.DATE(), nullable=False),
+ sa.Column("erc20_active_address_cnt", sa.INTEGER(), nullable=True),
+ sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("erc721_active_address_cnt", sa.INTEGER(), nullable=True),
+ sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("erc1155_active_address_cnt", sa.INTEGER(), nullable=True),
+ sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), nullable=True),
+ sa.PrimaryKeyConstraint("block_date"),
+ )
+ op.create_table(
+ "af_stats_na_daily_transactions",
+ sa.Column("block_date", sa.DATE(), nullable=False),
+ sa.Column("cnt", sa.BIGINT(), nullable=True),
+ sa.Column("total_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("txn_error_cnt", sa.BIGINT(), nullable=True),
+ sa.Column("avg_transaction_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("avg_gas_price", sa.NUMERIC(), nullable=True),
+ sa.Column("max_gas_price", sa.NUMERIC(), nullable=True),
+ sa.Column("min_gas_price", sa.NUMERIC(), nullable=True),
+ sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("max_receipt_l1_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("min_receipt_l1_fee", sa.NUMERIC(), nullable=True),
+ sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), nullable=True),
+ sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), nullable=True),
+ sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), nullable=True),
+ sa.PrimaryKeyConstraint("block_date"),
+ )
+ op.create_table(
+ "coin_prices",
+ sa.Column("block_date", sa.DateTime(), nullable=False),
+ sa.Column("price", sa.Numeric(), nullable=True),
+ sa.PrimaryKeyConstraint("block_date"),
+ )
+ op.create_table(
+ "scheduled_metadata",
+ sa.Column("id", sa.INTEGER(), nullable=False),
+ sa.Column("dag_id", sa.VARCHAR(), nullable=True),
+ sa.Column("execution_date", sa.DateTime(), nullable=True),
+ sa.Column("last_data_timestamp", sa.DateTime(), nullable=True),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.drop_table("daily_wallet_addresses_aggregates")
+ op.drop_table("daily_addresses_aggregates")
+ op.drop_table("daily_blocks_aggregates")
+ op.drop_table("daily_tokens_aggregates")
+ op.drop_table("scheduled_token_count_metadata")
+ op.drop_table("scheduled_wallet_count_metadata")
+ op.drop_table("daily_contract_interacted_aggregates")
+ op.drop_table("daily_transactions_aggregates")
+ op.drop_table("statistics_wallet_addresses")
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "statistics_wallet_addresses",
+ sa.Column("address", postgresql.BYTEA(), autoincrement=False, nullable=False),
+ sa.Column("txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc20_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc721_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("deposit_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("withdraw_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("tag", sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("address", name="statistics_wallet_addresses_pkey"),
+ )
+ op.create_table(
+ "daily_transactions_aggregates",
+ sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False),
+ sa.Column("cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("total_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("txn_error_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("avg_transaction_fee", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("max_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("min_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("max_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("min_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("max_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("min_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("max_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("min_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("block_date", name="daily_transactions_aggregates_pkey"),
+ )
+ op.create_table(
+ "daily_contract_interacted_aggregates",
+ sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False),
+ sa.Column("from_address", postgresql.BYTEA(), autoincrement=False, nullable=False),
+ sa.Column("to_address", postgresql.BYTEA(), autoincrement=False, nullable=False),
+ sa.Column("contract_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint(
+ "block_date", "from_address", "to_address", name="daily_contract_interacted_aggregates_pkey"
+ ),
+ )
+ op.create_table(
+ "scheduled_wallet_count_metadata",
+ sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+ sa.Column("dag_id", sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column("execution_date", postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("id", name="scheduled_wallet_count_metadata_pkey"),
+ )
+ op.create_table(
+ "scheduled_token_count_metadata",
+ sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+ sa.Column("dag_id", sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column("execution_date", postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("id", name="scheduled_token_count_metadata_pkey"),
+ )
+ op.create_table(
+ "daily_tokens_aggregates",
+ sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False),
+ sa.Column("erc20_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("erc721_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("block_date", name="daily_tokens_aggregates_pkey"),
+ )
+ op.create_table(
+ "daily_blocks_aggregates",
+ sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False),
+ sa.Column("cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("avg_size", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_gas_limit", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("total_gas_used", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("avg_gas_used_percentage", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("avg_txn_cnt", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.Column("total_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("block_interval", sa.NUMERIC(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("block_date", name="daily_blocks_aggregates_pkey"),
+ )
+ op.create_table(
+ "daily_addresses_aggregates",
+ sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False),
+ sa.Column("active_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("receiver_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("sender_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("total_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.Column("new_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("block_date", name="daily_addresses_aggregates_pkey"),
+ )
+ op.create_table(
+ "daily_wallet_addresses_aggregates",
+ sa.Column("address", postgresql.BYTEA(), autoincrement=False, nullable=False),
+ sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False),
+ sa.Column("txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column(
+ "internal_txn_cnt",
+ sa.INTEGER(),
+ sa.Computed("(internal_txn_in_cnt + internal_txn_out_cnt)", persisted=True),
+ autoincrement=False,
+ nullable=True,
+ ),
+ sa.Column(
+ "erc20_transfer_cnt",
+ sa.INTEGER(),
+ sa.Computed("(erc20_transfer_in_cnt + erc20_transfer_out_cnt)", persisted=True),
+ autoincrement=False,
+ nullable=True,
+ ),
+ sa.Column(
+ "erc721_transfer_cnt",
+ sa.INTEGER(),
+ sa.Computed("(erc721_transfer_in_cnt + erc721_transfer_out_cnt)", persisted=True),
+ autoincrement=False,
+ nullable=True,
+ ),
+ sa.Column(
+ "erc1155_transfer_cnt",
+ sa.INTEGER(),
+ sa.Computed("(erc1155_transfer_in_cnt + erc1155_transfer_out_cnt)", persisted=True),
+ autoincrement=False,
+ nullable=True,
+ ),
+ sa.Column("txn_self_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_in_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_out_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("txn_self_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column(
+ "txn_cnt",
+ sa.INTEGER(),
+ sa.Computed("((txn_in_cnt + txn_out_cnt) - txn_self_cnt)", persisted=True),
+ autoincrement=False,
+ nullable=True,
+ ),
+ sa.Column("deposit_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("withdraw_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("gas_in_used", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("l2_txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("l1_txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("gas_out_used", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("l2_txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("l1_txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True),
+ sa.Column("contract_deployed_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("from_address_unique_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("to_address_unique_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint("address", "block_date", name="daily_wallet_addresses_aggregates_pkey"),
+ )
+ op.drop_table("scheduled_metadata")
+ op.drop_table("coin_prices")
+ op.drop_table("af_stats_na_daily_transactions")
+ op.drop_table("af_stats_na_daily_tokens")
+ op.drop_table("af_stats_na_daily_bridge_transactions")
+ op.drop_table("af_stats_na_daily_blocks")
+ op.drop_table("af_stats_na_daily_addresses")
+ op.drop_table("af_index_token_address_stats")
+ op.drop_table("af_index_token_address_daily_stats")
+ op.drop_table("af_index_stats")
+ op.drop_table("af_index_na_scheduled_metadata")
+ op.drop_table("af_index_daily_stats")
+ op.drop_index(
+ "feature_erc1155_token_holding_token_wallet_block_desc_index", table_name="af_erc1155_token_holdings_hist"
+ )
+ op.drop_index("feature_erc1155_token_holding_token_block_desc_index", table_name="af_erc1155_token_holdings_hist")
+ op.drop_table("af_erc1155_token_holdings_hist")
+ op.drop_index(
+ "af_erc1155_token_holdings_current_wallet_block_desc_index", table_name="af_erc1155_token_holdings_current"
+ )
+ op.drop_index(
+ "af_erc1155_token_holdings_current_token_block_desc_index", table_name="af_erc1155_token_holdings_current"
+ )
+ op.drop_table("af_erc1155_token_holdings_current")
+ op.drop_index("address_internal_transactions_address_nt_t_idx", table_name="address_internal_transactions")
+ op.drop_table("address_internal_transactions")
+ op.drop_index("address_contract_operations_address_block_tn_t_idx", table_name="address_contract_operations")
+ op.drop_table("address_contract_operations")
+ # ### end Alembic commands ###
diff --git a/migrations/manual_versions/20240704_base_version.sql b/hemera/migrations/manual_versions/20240704_base_version.sql
similarity index 100%
rename from migrations/manual_versions/20240704_base_version.sql
rename to hemera/migrations/manual_versions/20240704_base_version.sql
diff --git a/migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql b/hemera/migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql
similarity index 100%
rename from migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql
rename to hemera/migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql
diff --git a/migrations/manual_versions/20240716_add_api_server_table.sql b/hemera/migrations/manual_versions/20240716_add_api_server_table.sql
similarity index 100%
rename from migrations/manual_versions/20240716_add_api_server_table.sql
rename to hemera/migrations/manual_versions/20240716_add_api_server_table.sql
diff --git a/migrations/manual_versions/20240725_update_index_table_optimize.sql b/hemera/migrations/manual_versions/20240725_update_index_table_optimize.sql
similarity index 100%
rename from migrations/manual_versions/20240725_update_index_table_optimize.sql
rename to hemera/migrations/manual_versions/20240725_update_index_table_optimize.sql
diff --git a/migrations/manual_versions/20240726_modify_sync_record_table.sql b/hemera/migrations/manual_versions/20240726_modify_sync_record_table.sql
similarity index 100%
rename from migrations/manual_versions/20240726_modify_sync_record_table.sql
rename to hemera/migrations/manual_versions/20240726_modify_sync_record_table.sql
diff --git a/migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql b/hemera/migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql
similarity index 100%
rename from migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql
rename to hemera/migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql
diff --git a/migrations/manual_versions/20240731_add_user_ops_table.sql b/hemera/migrations/manual_versions/20240731_add_user_ops_table.sql
similarity index 100%
rename from migrations/manual_versions/20240731_add_user_ops_table.sql
rename to hemera/migrations/manual_versions/20240731_add_user_ops_table.sql
diff --git a/migrations/manual_versions/20240802_add_exception_recorder_table.sql b/hemera/migrations/manual_versions/20240802_add_exception_recorder_table.sql
similarity index 100%
rename from migrations/manual_versions/20240802_add_exception_recorder_table.sql
rename to hemera/migrations/manual_versions/20240802_add_exception_recorder_table.sql
diff --git a/migrations/manual_versions/20240802_add_l2_chain_table.sql b/hemera/migrations/manual_versions/20240802_add_l2_chain_table.sql
similarity index 100%
rename from migrations/manual_versions/20240802_add_l2_chain_table.sql
rename to hemera/migrations/manual_versions/20240802_add_l2_chain_table.sql
diff --git a/migrations/manual_versions/20240802_add_uniswap_v2_table.sql b/hemera/migrations/manual_versions/20240802_add_uniswap_v2_table.sql
similarity index 100%
rename from migrations/manual_versions/20240802_add_uniswap_v2_table.sql
rename to hemera/migrations/manual_versions/20240802_add_uniswap_v2_table.sql
diff --git a/migrations/manual_versions/20240805_add_column_to_contracts_table.sql b/hemera/migrations/manual_versions/20240805_add_column_to_contracts_table.sql
similarity index 100%
rename from migrations/manual_versions/20240805_add_column_to_contracts_table.sql
rename to hemera/migrations/manual_versions/20240805_add_column_to_contracts_table.sql
diff --git a/migrations/manual_versions/20240806_add_current_traits_activeness.sql b/hemera/migrations/manual_versions/20240806_add_current_traits_activeness.sql
similarity index 100%
rename from migrations/manual_versions/20240806_add_current_traits_activeness.sql
rename to hemera/migrations/manual_versions/20240806_add_current_traits_activeness.sql
diff --git a/migrations/manual_versions/20240808_add_blue_chip_holding.sql b/hemera/migrations/manual_versions/20240808_add_blue_chip_holding.sql
similarity index 100%
rename from migrations/manual_versions/20240808_add_blue_chip_holding.sql
rename to hemera/migrations/manual_versions/20240808_add_blue_chip_holding.sql
diff --git a/migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql b/hemera/migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql
similarity index 100%
rename from migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql
rename to hemera/migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql
diff --git a/migrations/manual_versions/20240827_add_token_price_table.sql b/hemera/migrations/manual_versions/20240827_add_token_price_table.sql
similarity index 100%
rename from migrations/manual_versions/20240827_add_token_price_table.sql
rename to hemera/migrations/manual_versions/20240827_add_token_price_table.sql
diff --git a/migrations/manual_versions/20240830_add_address_token_deposit_table.sql b/hemera/migrations/manual_versions/20240830_add_address_token_deposit_table.sql
similarity index 100%
rename from migrations/manual_versions/20240830_add_address_token_deposit_table.sql
rename to hemera/migrations/manual_versions/20240830_add_address_token_deposit_table.sql
diff --git a/migrations/manual_versions/20240831_add_ens.sql b/hemera/migrations/manual_versions/20240831_add_ens.sql
similarity index 100%
rename from migrations/manual_versions/20240831_add_ens.sql
rename to hemera/migrations/manual_versions/20240831_add_ens.sql
diff --git a/migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql b/hemera/migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql
similarity index 100%
rename from migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql
rename to hemera/migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql
diff --git a/migrations/manual_versions/20240910_add_address_index.sql b/hemera/migrations/manual_versions/20240910_add_address_index.sql
similarity index 100%
rename from migrations/manual_versions/20240910_add_address_index.sql
rename to hemera/migrations/manual_versions/20240910_add_address_index.sql
diff --git a/migrations/manual_versions/20240911_add_opensea.sql b/hemera/migrations/manual_versions/20240911_add_opensea.sql
similarity index 100%
rename from migrations/manual_versions/20240911_add_opensea.sql
rename to hemera/migrations/manual_versions/20240911_add_opensea.sql
diff --git a/migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql b/hemera/migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql
similarity index 100%
rename from migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql
rename to hemera/migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql
diff --git a/migrations/manual_versions/20240927_add_merchant_moe_table.sql b/hemera/migrations/manual_versions/20240927_add_merchant_moe_table.sql
similarity index 100%
rename from migrations/manual_versions/20240927_add_merchant_moe_table.sql
rename to hemera/migrations/manual_versions/20240927_add_merchant_moe_table.sql
diff --git a/migrations/manual_versions/20241017_earlier_table_change.sql b/hemera/migrations/manual_versions/20241017_earlier_table_change.sql
similarity index 100%
rename from migrations/manual_versions/20241017_earlier_table_change.sql
rename to hemera/migrations/manual_versions/20241017_earlier_table_change.sql
diff --git a/migrations/manual_versions/20241105_add_address_index_and_stats.sql b/hemera/migrations/manual_versions/20241105_add_address_index_and_stats.sql
similarity index 100%
rename from migrations/manual_versions/20241105_add_address_index_and_stats.sql
rename to hemera/migrations/manual_versions/20241105_add_address_index_and_stats.sql
diff --git a/migrations/manual_versions/20241121_add_failure_records_table.sql b/hemera/migrations/manual_versions/20241121_add_failure_records_table.sql
similarity index 100%
rename from migrations/manual_versions/20241121_add_failure_records_table.sql
rename to hemera/migrations/manual_versions/20241121_add_failure_records_table.sql
diff --git a/migrations/manual_versions/20241128_update_table_for_0.6.0.sql b/hemera/migrations/manual_versions/20241128_update_table_for_0.6.0.sql
similarity index 100%
rename from migrations/manual_versions/20241128_update_table_for_0.6.0.sql
rename to hemera/migrations/manual_versions/20241128_update_table_for_0.6.0.sql
diff --git a/migrations/script.py.mako b/hemera/migrations/script.py.mako
similarity index 100%
rename from migrations/script.py.mako
rename to hemera/migrations/script.py.mako
diff --git a/migrations/versions/20240704_base_version.py b/hemera/migrations/versions/20240704_base_version.py
similarity index 100%
rename from migrations/versions/20240704_base_version.py
rename to hemera/migrations/versions/20240704_base_version.py
diff --git a/migrations/versions/20240708_tokens_table_add_column_block_number.py b/hemera/migrations/versions/20240708_tokens_table_add_column_block_number.py
similarity index 100%
rename from migrations/versions/20240708_tokens_table_add_column_block_number.py
rename to hemera/migrations/versions/20240708_tokens_table_add_column_block_number.py
diff --git a/migrations/versions/20240716_add_api_server_table.py b/hemera/migrations/versions/20240716_add_api_server_table.py
similarity index 100%
rename from migrations/versions/20240716_add_api_server_table.py
rename to hemera/migrations/versions/20240716_add_api_server_table.py
diff --git a/migrations/versions/20240725_update_index_table_optimize.py b/hemera/migrations/versions/20240725_update_index_table_optimize.py
similarity index 100%
rename from migrations/versions/20240725_update_index_table_optimize.py
rename to hemera/migrations/versions/20240725_update_index_table_optimize.py
diff --git a/migrations/versions/20240726_modify_sync_record_table.py b/hemera/migrations/versions/20240726_modify_sync_record_table.py
similarity index 100%
rename from migrations/versions/20240726_modify_sync_record_table.py
rename to hemera/migrations/versions/20240726_modify_sync_record_table.py
diff --git a/migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py b/hemera/migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py
similarity index 100%
rename from migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py
rename to hemera/migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py
diff --git a/migrations/versions/20240731_add_user_ops_table.py b/hemera/migrations/versions/20240731_add_user_ops_table.py
similarity index 100%
rename from migrations/versions/20240731_add_user_ops_table.py
rename to hemera/migrations/versions/20240731_add_user_ops_table.py
diff --git a/migrations/versions/20240802_add_exception_recorder_table.py b/hemera/migrations/versions/20240802_add_exception_recorder_table.py
similarity index 100%
rename from migrations/versions/20240802_add_exception_recorder_table.py
rename to hemera/migrations/versions/20240802_add_exception_recorder_table.py
diff --git a/migrations/versions/20240802_add_l2_chain_table.py b/hemera/migrations/versions/20240802_add_l2_chain_table.py
similarity index 100%
rename from migrations/versions/20240802_add_l2_chain_table.py
rename to hemera/migrations/versions/20240802_add_l2_chain_table.py
diff --git a/migrations/versions/20240802_add_uniswap_v2_table.py b/hemera/migrations/versions/20240802_add_uniswap_v2_table.py
similarity index 100%
rename from migrations/versions/20240802_add_uniswap_v2_table.py
rename to hemera/migrations/versions/20240802_add_uniswap_v2_table.py
diff --git a/migrations/versions/20240805_add_column_to_contracts_table.py b/hemera/migrations/versions/20240805_add_column_to_contracts_table.py
similarity index 100%
rename from migrations/versions/20240805_add_column_to_contracts_table.py
rename to hemera/migrations/versions/20240805_add_column_to_contracts_table.py
diff --git a/migrations/versions/20240806_add_current_traits_activeness.py b/hemera/migrations/versions/20240806_add_current_traits_activeness.py
similarity index 100%
rename from migrations/versions/20240806_add_current_traits_activeness.py
rename to hemera/migrations/versions/20240806_add_current_traits_activeness.py
diff --git a/migrations/versions/20240808_add_blue_chip_holding.py b/hemera/migrations/versions/20240808_add_blue_chip_holding.py
similarity index 100%
rename from migrations/versions/20240808_add_blue_chip_holding.py
rename to hemera/migrations/versions/20240808_add_blue_chip_holding.py
diff --git a/migrations/versions/20240813_add_daily_wallet_address_tables.py b/hemera/migrations/versions/20240813_add_daily_wallet_address_tables.py
similarity index 100%
rename from migrations/versions/20240813_add_daily_wallet_address_tables.py
rename to hemera/migrations/versions/20240813_add_daily_wallet_address_tables.py
diff --git a/migrations/versions/20240827_add_token_price_table.py b/hemera/migrations/versions/20240827_add_token_price_table.py
similarity index 100%
rename from migrations/versions/20240827_add_token_price_table.py
rename to hemera/migrations/versions/20240827_add_token_price_table.py
diff --git a/migrations/versions/20240830_add_address_token_deposit_table.py b/hemera/migrations/versions/20240830_add_address_token_deposit_table.py
similarity index 100%
rename from migrations/versions/20240830_add_address_token_deposit_table.py
rename to hemera/migrations/versions/20240830_add_address_token_deposit_table.py
diff --git a/migrations/versions/20240831_add_ens.py b/hemera/migrations/versions/20240831_add_ens.py
similarity index 100%
rename from migrations/versions/20240831_add_ens.py
rename to hemera/migrations/versions/20240831_add_ens.py
diff --git a/migrations/versions/20240906_add_uniswap_v3_enhance_table.py b/hemera/migrations/versions/20240906_add_uniswap_v3_enhance_table.py
similarity index 100%
rename from migrations/versions/20240906_add_uniswap_v3_enhance_table.py
rename to hemera/migrations/versions/20240906_add_uniswap_v3_enhance_table.py
diff --git a/migrations/versions/20240910_add_address_index.py b/hemera/migrations/versions/20240910_add_address_index.py
similarity index 100%
rename from migrations/versions/20240910_add_address_index.py
rename to hemera/migrations/versions/20240910_add_address_index.py
diff --git a/migrations/versions/20240911_add_opensea.py b/hemera/migrations/versions/20240911_add_opensea.py
similarity index 100%
rename from migrations/versions/20240911_add_opensea.py
rename to hemera/migrations/versions/20240911_add_opensea.py
diff --git a/migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py b/hemera/migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py
similarity index 100%
rename from migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py
rename to hemera/migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py
diff --git a/migrations/versions/20240927_add_merchant_moe_table.py b/hemera/migrations/versions/20240927_add_merchant_moe_table.py
similarity index 100%
rename from migrations/versions/20240927_add_merchant_moe_table.py
rename to hemera/migrations/versions/20240927_add_merchant_moe_table.py
diff --git a/migrations/versions/20241017_earlier_table_change.py b/hemera/migrations/versions/20241017_earlier_table_change.py
similarity index 100%
rename from migrations/versions/20241017_earlier_table_change.py
rename to hemera/migrations/versions/20241017_earlier_table_change.py
diff --git a/migrations/versions/20241105_add_address_index_and_stats.py b/hemera/migrations/versions/20241105_add_address_index_and_stats.py
similarity index 100%
rename from migrations/versions/20241105_add_address_index_and_stats.py
rename to hemera/migrations/versions/20241105_add_address_index_and_stats.py
diff --git a/migrations/versions/20241121_add_failure_records_table.py b/hemera/migrations/versions/20241121_add_failure_records_table.py
similarity index 100%
rename from migrations/versions/20241121_add_failure_records_table.py
rename to hemera/migrations/versions/20241121_add_failure_records_table.py
diff --git a/migrations/versions/20241128_update_table_for_0.6.0.py b/hemera/migrations/versions/20241128_update_table_for_0.6.0.py
similarity index 100%
rename from migrations/versions/20241128_update_table_for_0.6.0.py
rename to hemera/migrations/versions/20241128_update_table_for_0.6.0.py
diff --git a/hemera/migrations/versions/20250214_sync_1_0_0.py b/hemera/migrations/versions/20250214_sync_1_0_0.py
new file mode 100644
index 000000000..4df1671b1
--- /dev/null
+++ b/hemera/migrations/versions/20250214_sync_1_0_0.py
@@ -0,0 +1,79 @@
+"""sync 1.0.0
+
+Revision ID: ab4e180ea7a0
+Revises: 3c7ea7b95dc5
+Create Date: 2025-02-14 11:27:59.209992
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import ARRAY, BIGINT, BOOLEAN, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
+from sqlalchemy.dialects.postgresql import BYTEA
+
+# revision identifiers, used by Alembic.
+revision: str = "ab4e180ea7a0"
+down_revision: Union[str, None] = "3c7ea7b95dc5"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "nft_transfers",
+ sa.Column("transaction_hash", BYTEA, nullable=False),
+ sa.Column("block_hash", BYTEA, nullable=False),
+ sa.Column("log_index", sa.Integer(), nullable=False),
+ sa.Column("token_id", NUMERIC(78, 0), nullable=False),
+ sa.Column("from_address", BYTEA, nullable=True),
+ sa.Column("to_address", BYTEA, nullable=True),
+ sa.Column("token_address", BYTEA, nullable=True),
+ sa.Column("value", NUMERIC(100, 0), nullable=True),
+ sa.Column("block_number", BIGINT, nullable=False),
+ sa.Column("block_timestamp", TIMESTAMP(timezone=False), nullable=False),
+ sa.Column("create_time", TIMESTAMP(timezone=False), nullable=True),
+ sa.Column("update_time", TIMESTAMP(timezone=False), nullable=True),
+ sa.Column("reorg", sa.Boolean(), nullable=True),
+ sa.PrimaryKeyConstraint(
+ "transaction_hash",
+ "block_timestamp",
+ "block_number",
+ "log_index",
+ "block_hash",
+ "token_id",
+ name="nft_transfers_pkey",
+ ),
+ )
+ op.add_column("tokens", sa.Column("no_balance_of", BOOLEAN(), nullable=True, default=False))
+ op.add_column("tokens", sa.Column("fail_balance_of_count", INTEGER(), nullable=True, default=0))
+ op.add_column("tokens", sa.Column("no_total_supply", BOOLEAN(), nullable=True, default=False))
+ op.add_column("tokens", sa.Column("fail_total_supply_count", INTEGER(), nullable=True, default=0))
+ op.add_column("tokens", sa.Column("tags", ARRAY(VARCHAR()), nullable=True))
+ op.add_column("tokens", sa.Column("succeed_balance_of_count", INTEGER(), nullable=True, default=0))
+ op.drop_constraint("address_token_balances_pkey", "address_token_balances", type_="primary")
+ op.create_primary_key(
+ "address_token_balances_pkey",
+ "address_token_balances",
+ ["address", "token_address", "token_id", "block_number", "block_timestamp"],
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table("nft_transfers")
+ op.drop_column("tokens", "no_balance_of")
+ op.drop_column("tokens", "fail_balance_of_count")
+ op.drop_column("tokens", "no_total_supply")
+ op.drop_column("tokens", "fail_total_supply_count")
+ op.drop_column("tokens", "tags")
+ op.drop_column("tokens", "succeed_balance_of_count")
+ op.drop_constraint("address_token_balances_pkey", "address_token_balances", type_="primary")
+ op.create_primary_key(
+ "address_token_balances_pkey",
+ "address_token_balances",
+ ["address", "token_address", "token_id", "block_number"],
+ )
diff --git a/resource/hemera.ini.example b/hemera/resource/hemera.ini.example
similarity index 100%
rename from resource/hemera.ini.example
rename to hemera/resource/hemera.ini.example
diff --git a/hemera/resource/template/custom_api_namespace.example b/hemera/resource/template/custom_api_namespace.example
new file mode 100644
index 000000000..21143728a
--- /dev/null
+++ b/hemera/resource/template/custom_api_namespace.example
@@ -0,0 +1,7 @@
+from flask_restx.namespace import Namespace
+
+${job}_namespace = Namespace(
+ "${job_descript} Namespace",
+ path="/",
+ description="${job_descript} API",
+)
diff --git a/hemera/resource/template/custom_domain.example b/hemera/resource/template/custom_domain.example
new file mode 100644
index 000000000..bf3f22383
--- /dev/null
+++ b/hemera/resource/template/custom_domain.example
@@ -0,0 +1,30 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
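+# "Current" domains map to latest-state tables that are upserted in place;
+# "History" domains map to append-only tables keyed by block_number.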
+@dataclass
+class AddressFeatureValueCurrent(Domain):
+ address: str
+ feature_value: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class AddressFeatureValueHistory(Domain):
+ address: str
+ feature_value: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class AddressFeatureEvent(Domain):
+ transaction_hash: str
+ address: str
+ event_value: int
+ block_number: int
+ block_timestamp: int
diff --git a/hemera/resource/template/custom_init.example b/hemera/resource/template/custom_init.example
new file mode 100644
index 000000000..daec669d6
--- /dev/null
+++ b/hemera/resource/template/custom_init.example
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.${job_name}.domains import *
+
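+# Compare the base version so that pre-release builds of Hemera 1.0.0 (e.g. 1.0.0a1) also pass the check.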
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
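+# Register an entity type flag for this UDF and declare the domain dataclasses
+# it outputs, so the indexer can route job outputs to this entity type.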
+value = DynamicEntityTypeRegistry.register("${entity_name}")
+DynamicEntityTypeRegistry.register_output_types(value, {CustomDomainA, CustomDomainB})
diff --git a/hemera/resource/template/custom_module.example b/hemera/resource/template/custom_module.example
new file mode 100644
index 000000000..d9c5e7f45
--- /dev/null
+++ b/hemera/resource/template/custom_module.example
@@ -0,0 +1,104 @@
+from typing import Type, Union
+
+from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
+from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.${job}.domains import AddressFeatureValueCurrent, AddressFeatureValueHistory, AddressFeatureEvent
+
+
+class AddressFeatureValuesCurrent(HemeraModel):
+ __tablename__ = "address_feature_values_current"
+ address = Column(BYTEA, primary_key=True)
+ feature_value = Column(NUMERIC)
+
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ @staticmethod
+ def model_domain_mapping():
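+        # With conflict_do_update=True this mapping is an upsert: on primary-key
+        # conflict the existing row is replaced only when update_strategy (a SQL
+        # boolean expression over EXCLUDED and the current row) evaluates true.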
+ return [
+ {
+ "domain": AddressFeatureValueCurrent,
+ "conflict_do_update": True,
+ "update_strategy": "EXCLUDED.block_number > address_feature_values_current.block_number",
+ "converter": general_converter,
+ },
+ ]
+
+
+class AddressFeatureValuesHistory(HemeraModel):
+ __tablename__ = "address_feature_values_history"
+ address = Column(BYTEA, primary_key=True)
+ feature_value = Column(NUMERIC)
+
+ block_number = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ # If the custom job supports reorg,
+ # the data table that stores the data must contain a reorg field.
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("address", "block_number"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AddressFeatureValueHistory,
+ "conflict_do_update": False,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
+
+
+class AddressFeatureEvents(HemeraModel):
+ __tablename__ = "address_feature_events"
+ transaction_hash = Column(BYTEA, primary_key=True)
+ address = Column(BYTEA)
+ event_value = Column(NUMERIC)
+
+ block_number = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ # If the custom job supports reorg,
+ # the data table that stores the data must contain a reorg field.
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("transaction_hash", "block_number"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AddressFeatureValueEvent,
+ "conflict_do_update": False,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
+
+Index("address_feature_events_address_index", AddressFeatureEvents.address)
+
+# This is a custom data conversion function. Usually you can use 'general_converter' directly.
+def converter(
+ table: Type[HemeraModel],
+    data: Union[AddressFeatureValueCurrent, AddressFeatureValueHistory, AddressFeatureEvent],
+ is_update=False,
+):
+ # Special data conversion logic here
+ converted_data = do_some_work(table, data, is_update)
+ return converted_data
diff --git a/hemera/resource/template/export_custom_job.example b/hemera/resource/template/export_custom_job.example
new file mode 100644
index 000000000..575bf7994
--- /dev/null
+++ b/hemera/resource/template/export_custom_job.example
@@ -0,0 +1,64 @@
+from typing import List, Union
+
+# NOTE: the domain and specification import paths below assume the standard
+# hemera package layout; adjust them if your project differs.
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs.base_job import Collector, ExtensionJob
+from hemera.indexer.specification.specification import (
+    FromAddressSpecification,
+    FuncSignSpecification,
+    ToAddressSpecification,
+    TopicSpecification,
+    TransactionFilterByLogs,
+    TransactionFilterByTransactionInfo,
+)
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera_udf.${job}.domains import *
+
+# This job could also inherit from FilterTransactionDataJob, e.g. class Export${job_name}Job(FilterTransactionDataJob):
+class Export${job_name}Job(ExtensionJob):
+ # If the data processing logic of the developed job supports re-running with on-chain data reorg,
+ # set able_to_reorg to True so that the scheduler can automatically start the job during the reorg process.
+ able_to_reorg = False
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+    # If the data you need to process is limited to certain contract logs, events, or a specific type of
+    # transaction, inherit from FilterTransactionDataJob instead of ExtensionJob and implement the
+    # self.get_filter method. Upstream export jobs then fetch only the matching data, which speeds up indexing.
+ def get_filter(self):
+
+        filters = [
+ TransactionFilterByTransactionInfo(
+ ToAddressSpecification(address=contract),
+ FromAddressSpecification(address=contract),
+ FuncSignSpecification(func_sign=ERC20_BALANCE_OF_FUNCTION.get_signature()),
+ ),
+ TransactionFilterByLogs([TopicSpecification(addresses=addresses, topics=topics)]),
+ ]
+
+        return filters
+
+    def _udf(self, transactions: List[Transaction], logs: List[Log], output: Collector[Union[AddressFeatureEvent, AddressFeatureValueHistory, AddressFeatureValueCurrent]]):
+        address_feature_events, address_feature_value_histories = some_work_here(transactions, logs)
+
+        address_feature_value_currents = some_distinct_logic(address_feature_events)
+
+        output.collect_domains(address_feature_events)
+        output.collect_domains(address_feature_value_histories)
+        output.collect_domains(address_feature_value_currents)
+
+ # more collect example:
+ # for item in job_custom_domain_a_list:
+ # output.collect(item)
+ #
+ # for item in job_custom_domain_a_list:
+ # output.collect_domain(item)
+ #
+ # for item in job_custom_domain_a_list:
+ # output.collect_item(item.type(), item)
+ #
+ # output.collect_domains(job_custom_domain_a_list)
+ # output.collect_items(job_custom_domain_a_list[0].type(), job_custom_domain_a_list)
diff --git a/hemera/version.py b/hemera/version.py
new file mode 100644
index 000000000..2a242f969
--- /dev/null
+++ b/hemera/version.py
@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from hemera import __version__ as version
+
+__all__ = ["version"]
diff --git a/indexer/modules/custom/README.md b/hemera_udf/README.md
similarity index 85%
rename from indexer/modules/custom/README.md
rename to hemera_udf/README.md
index 223ebd795..96dc13214 100644
--- a/indexer/modules/custom/README.md
+++ b/hemera_udf/README.md
@@ -17,7 +17,5 @@
1. Entities in the `models` directory relate to the database and require setting primary keys, indexes, etc.
The `model_domain_mapping` maps the `dataclass` entities to database tables and includes settings for update
strategies.
-2. Entities in the `domain` directory inherit from either `Domain` or `FilterData`, depending on whether your job
- requires preliminary filtering.
-3. The output within the `job` is handled through the method `self._collect_item`, where each entity instance is added
+2. The output within the `job` is handled through the method `self._collect_item`, where each entity instance is added
one by one.
diff --git a/hemera_udf/__init__.py b/hemera_udf/__init__.py
new file mode 100644
index 000000000..6e45a91db
--- /dev/null
+++ b/hemera_udf/__init__.py
@@ -0,0 +1 @@
+__version__ = "1.0.0a1"
diff --git a/hemera_udf/aave_v2/__init__.py b/hemera_udf/aave_v2/__init__.py
new file mode 100644
index 000000000..590884a24
--- /dev/null
+++ b/hemera_udf/aave_v2/__init__.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.aave_v2.domains.aave_v2_domain import (
+ AaveV2AddressCurrentD,
+ AaveV2BorrowD,
+ AaveV2CallRecordsD,
+ AaveV2DepositD,
+ AaveV2FlashLoanD,
+ AaveV2LiquidationAddressCurrentD,
+ AaveV2LiquidationCallD,
+ AaveV2RepayD,
+ AaveV2ReserveD,
+ AaveV2ReserveDataCurrentD,
+ AaveV2ReserveDataD,
+ AaveV2TransferD,
+ AaveV2WithdrawD,
+)
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+    raise RuntimeError(f"The package `hemera_udf.aave_v2:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("AAVE_V2")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ AaveV2ReserveD,
+ AaveV2DepositD,
+ AaveV2WithdrawD,
+ AaveV2BorrowD,
+ AaveV2RepayD,
+ AaveV2FlashLoanD,
+ AaveV2LiquidationCallD,
+ AaveV2AddressCurrentD,
+ AaveV2LiquidationAddressCurrentD,
+ AaveV2CallRecordsD,
+ AaveV2ReserveDataD,
+ AaveV2ReserveDataCurrentD,
+ AaveV2TransferD,
+ },
+)
diff --git a/hemera_udf/aave_v2/aave_v2_processors.py b/hemera_udf/aave_v2/aave_v2_processors.py
new file mode 100644
index 000000000..d6e19a618
--- /dev/null
+++ b/hemera_udf/aave_v2/aave_v2_processors.py
@@ -0,0 +1,200 @@
+import logging
+from abc import ABC, abstractmethod
+from typing import Any, Type, TypeVar
+
+from hemera.common.utils.abi_code_utils import Event
+from hemera.common.utils.web3_utils import extract_eth_address
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera_udf.aave_v2.abi.abi import DECIMALS_FUNCTIOIN, SYMBOL_FUNCTIOIN
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+
+
+class EventProcessor(ABC):
+ """Abstract base processor for handling different event types"""
+
+ def __init__(self, event: Event, data_class: Type[T], multicall_helper=None):
+ self.event = event
+ self.data_class = data_class
+ self.multicall_helper = multicall_helper
+
+ def process(self, log: Any) -> T:
+ """Process log data with common field handling and custom processing"""
+ try:
+ decoded_log = self.event.decode_log(log)
+ common_fields = self._extract_common_fields(log, self.event)
+ specific_fields = self._process_specific_fields(log, decoded_log)
+ return self.data_class(**common_fields, **specific_fields)
+ except Exception as e:
+ logger.error(f"Error processing {self.data_class.__name__}: {str(e)}")
+ raise
+
+ def _extract_common_fields(self, log: Any, event: Any) -> dict:
+ """Extract common fields present in all events"""
+ return {
+ "block_number": log.block_number,
+ "block_timestamp": log.block_timestamp,
+ "transaction_hash": log.transaction_hash,
+ "log_index": log.log_index,
+ "topic0": getattr(log, "topic0", None),
+ "event_name": getattr(event, "get_name", lambda: None)(),
+ }
+
+ @abstractmethod
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ """Process event-specific fields - to be implemented by concrete processors"""
+ pass
+
+
+class ReserveInitProcessor(EventProcessor):
+ """0x3a0ca721fc364424566385a1aa271ed508cc2c0949c2272575fb3013a163a45f"""
+
+ def _get_token_info(self, address: str) -> dict:
+ decimals_call = Call(target=address, function_abi=DECIMALS_FUNCTIOIN, block_number="latest")
+ symbol_call = Call(target=address, function_abi=SYMBOL_FUNCTIOIN, block_number="latest")
+ self.multicall_helper.execute_calls([decimals_call, symbol_call])
+ return {"decimals": decimals_call.returns["decimals"], "symbol": symbol_call.returns["symbol"]}
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ asset = extract_eth_address(log.topic1)
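+        # MKR's symbol() returns bytes32 rather than a string, so its metadata is hard-coded here.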
+ if asset == "0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2":
+ asset_info = {
+ "symbol": "MKR",
+ "decimals": 18,
+ }
+ else:
+ asset_info = self._get_token_info(asset)
+
+ a_token = extract_eth_address(log.topic2)
+ a_token_info = self._get_token_info(a_token)
+
+ stable_debt_token = decoded_log.get("stableDebtToken")
+ stable_debt_info = self._get_token_info(stable_debt_token)
+
+ variable_debt_token = decoded_log.get("variableDebtToken")
+ variable_debt_info = self._get_token_info(variable_debt_token)
+
+ return {
+ "asset": asset,
+ "asset_symbol": asset_info["symbol"],
+ "asset_decimals": asset_info["decimals"],
+ "a_token_address": a_token,
+ "a_token_symbol": a_token_info["symbol"],
+ "a_token_decimals": a_token_info["decimals"],
+ "stable_debt_token_address": stable_debt_token,
+ "stable_debt_token_decimals": stable_debt_info["decimals"],
+ "stable_debt_token_symbol": stable_debt_info["symbol"],
+ "variable_debt_token_address": variable_debt_token,
+ "variable_debt_token_symbol": variable_debt_info["symbol"],
+ "variable_debt_token_decimals": variable_debt_info["decimals"],
+ "interest_rate_strategy_address": decoded_log.get("interestRateStrategyAddress"),
+ }
+
+
+class DepositProcessor(EventProcessor):
+ """0xde6857219544bb5b7746f48ed30be6386fefc61b2f864cacf559893bf50fd951"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "reserve": extract_eth_address(log.topic1),
+ "on_behalf_of": extract_eth_address(log.topic2),
+ "referral": log.topic3,
+ "aave_user": decoded_log.get("user"),
+ "amount": decoded_log.get("amount"),
+ }
+
+
+class WithdrawProcessor(EventProcessor):
+ """0x3115d1449a7b732c986cba18244e897a450f61e1bb8d589cd2e69e6c8924f9f7"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "reserve": extract_eth_address(log.topic1),
+ "aave_user": extract_eth_address(log.topic2),
+ "amount": decoded_log.get("amount"),
+ }
+
+
+class BorrowProcessor(EventProcessor):
+ """0xc6a898309e823ee50bac64e45ca8adba6690e99e7841c45d754e2a38e9019d9b"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "reserve": extract_eth_address(log.topic1),
+ "on_behalf_of": extract_eth_address(log.topic2),
+ "referral": log.topic3,
+ "aave_user": decoded_log.get("user"),
+ "amount": decoded_log.get("amount"),
+ "borrow_rate_mode": decoded_log.get("borrowRateMode"),
+ "borrow_rate": decoded_log.get("borrowRate"),
+ }
+
+
+class RepayProcessor(EventProcessor):
+ """0x4cdde6e09bb755c9a5589ebaec640bbfedff1362d4b255ebf8339782b9942faa"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "reserve": extract_eth_address(log.topic1),
+ "aave_user": extract_eth_address(log.topic2),
+ "repayer": extract_eth_address(log.topic3),
+ "amount": decoded_log.get("amount"),
+ }
+
+
+class FlashLoanProcessor(EventProcessor):
+ """0x631042c832b07452973831137f2d73e395028b44b250dedc5abb0ee766e168ac"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "target": extract_eth_address(log.topic1),
+ "aave_user": extract_eth_address(log.topic2),
+ "reserve": extract_eth_address(log.topic3),
+ "amount": decoded_log.get("amount"),
+ "premium": decoded_log.get("premium"),
+ "referral": decoded_log.get("referralCode"),
+ }
+
+
+class LiquidationCallProcessor(EventProcessor):
+ """0xe413a321e8681d831f4dbccbca790d2952b56f977908e45be37335533e005286"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "collateral_asset": extract_eth_address(log.topic1),
+ "debt_asset": extract_eth_address(log.topic2),
+ "aave_user": extract_eth_address(log.topic3),
+ "debt_to_cover": decoded_log.get("debtToCover"),
+ "liquidated_collateral_amount": decoded_log.get("liquidatedCollateralAmount"),
+ "liquidator": decoded_log.get("liquidator"),
+ "receive_atoken": decoded_log.get("receiveAToken"),
+ }
+
+
+class ReserveDataUpdateProcessor(EventProcessor):
+ """0x804c9b842b2748a22bb64b345453a3de7ca54a6ca45ce00d415894979e22897a"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "asset": extract_eth_address(log.topic1),
+ "liquidity_rate": decoded_log.get("liquidityRate"),
+ "stable_borrow_rate": decoded_log.get("stableBorrowRate"),
+ "variable_borrow_rate": decoded_log.get("variableBorrowRate"),
+ "liquidity_index": decoded_log.get("liquidityIndex"),
+ "variable_borrow_index": decoded_log.get("variableBorrowIndex"),
+ }
+
+
+class TransferProcessor(EventProcessor):
+ """0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"""
+
+ def _process_specific_fields(self, log: Any, decoded_log: Any) -> dict:
+ return {
+ "a_token": log.address,
+ "amount": decoded_log.get("value"),
+ "aave_from": extract_eth_address(log.topic1),
+ "aave_to": extract_eth_address(log.topic2),
+ }
diff --git a/indexer/modules/bridge/domain/__init__.py b/hemera_udf/aave_v2/abi/__init__.py
similarity index 100%
rename from indexer/modules/bridge/domain/__init__.py
rename to hemera_udf/aave_v2/abi/__init__.py
diff --git a/hemera_udf/aave_v2/abi/abi.py b/hemera_udf/aave_v2/abi/abi.py
new file mode 100644
index 000000000..35b8ab3f2
--- /dev/null
+++ b/hemera_udf/aave_v2/abi/abi.py
@@ -0,0 +1,183 @@
+from hemera.common.utils.abi_code_utils import Event, Function
+
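+# Token view functions (balances, decimals, symbol) queried on-chain via multicall.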
+SCALED_BALANCE_OF_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "user", "type": "address"}],
+ "name": "scaledBalanceOf",
+ "outputs": [{"internalType": "uint256", "name": "balance", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+PRINCIPAL_BALANCE_OF_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "user", "type": "address"}],
+ "name": "principalBalanceOf",
+ "outputs": [{"internalType": "uint256", "name": "balance", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+DECIMALS_FUNCTIOIN = Function(
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "decimals",
+ "outputs": [{"name": "decimals", "type": "uint8"}],
+ "payable": False,
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+SYMBOL_FUNCTIOIN = Function(
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "symbol",
+ "outputs": [{"name": "symbol", "type": "string"}],
+ "payable": False,
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+RESERVE_INITIALIZED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "asset", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "aToken", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "stableDebtToken", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "variableDebtToken", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "interestRateStrategyAddress", "type": "address"},
+ ],
+ "name": "ReserveInitialized",
+ "type": "event",
+ }
+)
+
+DEPOSIT_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "reserve", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "onBehalfOf", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"indexed": True, "internalType": "uint16", "name": "referral", "type": "uint16"},
+ ],
+ "name": "Deposit",
+ "type": "event",
+ }
+)
+
+WITHDRAW_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "reserve", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "to", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ ],
+ "name": "Withdraw",
+ "type": "event",
+ }
+)
+
+BORROW_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "puinternalType": "address", "name": "reserve", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "onBehalfOf", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "borrowRateMode", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "borrowRate", "type": "uint256"},
+ {"indexed": True, "internalType": "uint16", "name": "referral", "type": "uint16"},
+ ],
+ "name": "Borrow",
+ "type": "event",
+ }
+)
+
+REPAY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "reserve", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "repayer", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ ],
+ "name": "Repay",
+ "type": "event",
+ }
+)
+
+FLUSH_LOAN_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "target", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "initiator", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "asset", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "premium", "type": "uint256"},
+ {"indexed": False, "internalType": "uint16", "name": "referralCode", "type": "uint16"},
+ ],
+ "name": "FlashLoan",
+ "type": "event",
+ }
+)
+
+LIQUIDATION_CALL_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "collateralAsset", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "debtAsset", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "debtToCover", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "liquidatedCollateralAmount", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "liquidator", "type": "address"},
+ {"indexed": False, "internalType": "bool", "name": "receiveAToken", "type": "bool"},
+ ],
+ "name": "LiquidationCall",
+ "type": "event",
+ }
+)
+
+RESERVE_DATA_UPDATED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "reserve", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "liquidityRate", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "stableBorrowRate", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "variableBorrowRate", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "liquidityIndex", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "variableBorrowIndex", "type": "uint256"},
+ ],
+ "name": "ReserveDataUpdated",
+ "type": "event",
+ }
+)
+
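+# Standard ERC-20 Transfer event; aToken mints, burns, and transfers all emit it.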
+TRANSFER_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "from", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "to", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "value", "type": "uint256"},
+ ],
+ "name": "Transfer",
+ "type": "event",
+ }
+)
diff --git a/indexer/modules/bridge/morphl2/__init__.py b/hemera_udf/aave_v2/domains/__init__.py
similarity index 100%
rename from indexer/modules/bridge/morphl2/__init__.py
rename to hemera_udf/aave_v2/domains/__init__.py
diff --git a/hemera_udf/aave_v2/domains/aave_v2_domain.py b/hemera_udf/aave_v2/domains/aave_v2_domain.py
new file mode 100644
index 000000000..0e7fc008b
--- /dev/null
+++ b/hemera_udf/aave_v2/domains/aave_v2_domain.py
@@ -0,0 +1,178 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class AaveV2ReserveD(Domain):
+ asset: str
+ asset_decimals: Optional[int]
+ asset_symbol: Optional[str]
+ a_token_address: str
+ a_token_symbol: Optional[str]
+ a_token_decimals: Optional[int]
+ stable_debt_token_address: str
+ stable_debt_token_symbol: Optional[str]
+ stable_debt_token_decimals: Optional[int]
+ variable_debt_token_address: str
+ variable_debt_token_symbol: Optional[str]
+ variable_debt_token_decimals: Optional[int]
+ interest_rate_strategy_address: str
+ block_number: int
+ block_timestamp: int
+ transaction_hash: str
+ log_index: int
+ topic0: Optional[str] = None
+ event_name: Optional[str] = None
+
+
+@dataclass
+class AaveV2BaseRecord(Domain):
+ block_number: Optional[int] = None
+ block_timestamp: Optional[int] = None
+ transaction_hash: Optional[str] = None
+ log_index: Optional[int] = None
+ event_name: Optional[str] = None
+ topic0: Optional[str] = None
+
+
+@dataclass
+class AaveV2DepositD(AaveV2BaseRecord):
+ reserve: Optional[str] = None
+ on_behalf_of: Optional[str] = None
+ referral: Optional[int] = None
+ aave_user: Optional[str] = None
+ amount: Optional[int] = None
+    # supply balance after the deposit
+ _after: Optional[int] = None
+
+
+@dataclass
+class AaveV2WithdrawD(AaveV2BaseRecord):
+ reserve: Optional[str] = None
+ aave_user: Optional[str] = None
+ to_address: Optional[str] = None
+ amount: Optional[int] = None
+    # supply balance after the withdrawal
+ _after: Optional[int] = None
+
+
+@dataclass
+class AaveV2BorrowD(AaveV2BaseRecord):
+ reserve: Optional[str] = None
+ on_behalf_of: Optional[str] = None
+ referral: Optional[int] = None
+ aave_user: Optional[str] = None
+ amount: Optional[int] = None
+ borrow_rate_mode: Optional[int] = None
+ borrow_rate: Optional[int] = None
+    # debt balance after the borrow
+ _after: Optional[int] = None
+
+
+@dataclass
+class AaveV2RepayD(AaveV2BaseRecord):
+ reserve: Optional[str] = None
+ aave_user: Optional[str] = None
+ repayer: Optional[str] = None
+ amount: Optional[int] = None
+ borrow_rate_mode: Optional[int] = None
+    # remaining debt after the repayment
+ _after: Optional[int] = None
+
+
+@dataclass
+class AaveV2TransferD(AaveV2BaseRecord):
+ aave_from: Optional[str] = None
+ aave_to: Optional[str] = None
+ a_token: Optional[str] = None
+ amount: Optional[int] = None
+
+
+@dataclass
+class AaveV2FlashLoanD(AaveV2BaseRecord):
+ target: Optional[str] = None
+ # initiator -> user, use `aave_user`
+ aave_user: Optional[str] = None
+ reserve: Optional[str] = None
+ amount: Optional[int] = None
+ premium: Optional[int] = None
+ referral: Optional[int] = None
+
+
+@dataclass
+class AaveV2LiquidationCallD(AaveV2BaseRecord):
+ collateral_asset: Optional[str] = None
+ debt_asset: Optional[str] = None
+ aave_user: Optional[str] = None
+ debt_to_cover: Optional[int] = None
+ liquidated_collateral_amount: Optional[int] = None
+ liquidator: Optional[str] = None
+ receive_atoken: Optional[bool] = None
+ debt_after_liquidation: Optional[int] = None
+ collateral_after_liquidation: Optional[int] = None
+
+
+@dataclass
+class AaveV2AddressCurrentD(Domain):
+ address: Optional[str] = None
+ asset: Optional[str] = None
+ supply_amount: Optional[int] = None
+ borrow_amount: Optional[int] = None
+ borrow_rate_mode: Optional[int] = None
+ block_number: Optional[int] = None
+ block_timestamp: Optional[int] = None
+
+
+@dataclass
+class AaveV2LiquidationAddressCurrentD(Domain):
+ address: Optional[str] = None
+ asset: Optional[str] = None
+ block_number: Optional[int] = None
+ last_total_value_of_liquidation: Optional[int] = None
+ last_liquidation_time: Optional[int] = None
+
+
+def aave_v2_address_current_factory():
+ return AaveV2AddressCurrentD(
+ address=None, asset=None, supply_amount=0, borrow_amount=0, block_timestamp=None, block_number=None
+ )
+
+
+@dataclass
+class AaveV2CallRecordsD(Domain):
+ target: Optional[str] = None
+ params: Optional[str] = None
+ function: Optional[str] = None
+ block_number: Optional[int] = None
+ result: Optional[str] = None
+
+
+@dataclass
+class AaveV2ReserveDataD(Domain):
+ block_timestamp: Optional[int] = None
+ transaction_hash: Optional[str] = None
+ log_index: Optional[int] = None
+ topic0: Optional[str] = None
+ event_name: Optional[str] = None
+ asset: Optional[str] = None
+ liquidity_rate: Optional[int] = None
+ stable_borrow_rate: Optional[int] = None
+ variable_borrow_rate: Optional[int] = None
+ liquidity_index: Optional[int] = None
+ variable_borrow_index: Optional[int] = None
+ block_number: Optional[int] = None
+
+
+@dataclass
+class AaveV2ReserveDataCurrentD(Domain):
+ asset: Optional[str] = None
+ block_number: Optional[int] = None
+ block_timestamp: Optional[int] = None
+ liquidity_rate: Optional[int] = None
+ stable_borrow_rate: Optional[int] = None
+ variable_borrow_rate: Optional[int] = None
+ liquidity_index: Optional[int] = None
+ variable_borrow_index: Optional[int] = None
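
Note: `aave_v2_address_current_factory` exists so the job can accumulate per-address, per-asset positions lazily. A minimal usage sketch (keys are hypothetical; the job itself wraps the same idea in a named `nested_dict` helper):

```python
from collections import defaultdict

from hemera_udf.aave_v2.domains.aave_v2_domain import aave_v2_address_current_factory

# address -> asset -> AaveV2AddressCurrentD, created on first access
res_d = defaultdict(lambda: defaultdict(aave_v2_address_current_factory))

user, asset = "0xuser", "0xasset"  # hypothetical keys
entry = res_d[user][asset]
entry.address, entry.asset, entry.supply_amount = user, asset, 10**18
```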
diff --git a/hemera_udf/aave_v2/export_aave_v2_job.py b/hemera_udf/aave_v2/export_aave_v2_job.py
new file mode 100644
index 000000000..dfe92df8c
--- /dev/null
+++ b/hemera_udf/aave_v2/export_aave_v2_job.py
@@ -0,0 +1,511 @@
+import logging
+from collections import defaultdict
+
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.log import Log
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.collection_utils import merge_dataclasses
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.aave_v2.aave_v2_processors import (
+ BorrowProcessor,
+ DepositProcessor,
+ FlashLoanProcessor,
+ LiquidationCallProcessor,
+ RepayProcessor,
+ ReserveDataUpdateProcessor,
+ ReserveInitProcessor,
+ TransferProcessor,
+ WithdrawProcessor,
+)
+from hemera_udf.aave_v2.abi.abi import (
+ BORROW_EVENT,
+ DEPOSIT_EVENT,
+ FLUSH_LOAN_EVENT,
+ LIQUIDATION_CALL_EVENT,
+ PRINCIPAL_BALANCE_OF_FUNCTION,
+ REPAY_EVENT,
+ RESERVE_DATA_UPDATED_EVENT,
+ RESERVE_INITIALIZED_EVENT,
+ SCALED_BALANCE_OF_FUNCTION,
+ TRANSFER_EVENT,
+ WITHDRAW_EVENT,
+)
+from hemera_udf.aave_v2.domains.aave_v2_domain import (
+ AaveV2AddressCurrentD,
+ AaveV2BorrowD,
+ AaveV2CallRecordsD,
+ AaveV2DepositD,
+ AaveV2FlashLoanD,
+ AaveV2LiquidationAddressCurrentD,
+ AaveV2LiquidationCallD,
+ AaveV2RepayD,
+ AaveV2ReserveD,
+ AaveV2ReserveDataCurrentD,
+ AaveV2ReserveDataD,
+ AaveV2TransferD,
+ AaveV2WithdrawD,
+ aave_v2_address_current_factory,
+)
+from hemera_udf.aave_v2.models.aave_v2_reserve import AaveV2Reserve
+
+logger = logging.getLogger(__name__)
+
+
+class ExportAaveV2Job(FilterTransactionDataJob):
+ """This job extract aave_v2 related infos"""
+
+ dependency_types = [Log]
+ output_types = [
+ AaveV2ReserveD,
+ AaveV2DepositD,
+ AaveV2WithdrawD,
+ AaveV2BorrowD,
+ AaveV2RepayD,
+ AaveV2FlashLoanD,
+ AaveV2LiquidationCallD,
+ AaveV2AddressCurrentD,
+ AaveV2LiquidationAddressCurrentD,
+ AaveV2CallRecordsD,
+ AaveV2ReserveDataD,
+ AaveV2TransferD,
+ AaveV2ReserveDataCurrentD,
+ ]
+ able_to_reorg = False
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self.db_service = kwargs["config"].get("db_service")
+ self.contract_addresses = {
+ "POOL_V2": self.user_defined_config["POOL_V2"],
+ "POOL_CONFIGURE": self.user_defined_config["POOL_CONFIGURE"],
+ }
+
+ self.multicall_helper = MultiCallHelper(self._web3, kwargs)
+
+ # sig -> processor
+ self._event_processors = {}
+ self._initialize_events_and_processors()
+
+ # init related token lookups
+ self.reserve_dic = {}
+ self.a_token_reserve_dic = {}
+ self.vary_debt_reserve_dic = {}
+ self.stable_debt_reserve_dic = {}
+ self._read_reserve()
+
+ self.address_set = set(
+ list(self.a_token_reserve_dic.keys())
+ + list(self.contract_addresses.values())
+ + list(self.vary_debt_reserve_dic)
+ + list(self.stable_debt_reserve_dic)
+ )
+
+ def _read_reserve(self):
+
+ with self.db_service.get_service_session() as session:
+ result = session.query(AaveV2Reserve).all()
+ for rr in result:
+ item = AaveV2ReserveD(
+ asset=bytes_to_hex_str(rr.asset),
+ asset_symbol=rr.asset_symbol,
+ asset_decimals=rr.asset_decimals,
+ a_token_address=bytes_to_hex_str(rr.a_token_address),
+ a_token_symbol=rr.a_token_symbol,
+ a_token_decimals=rr.a_token_decimals,
+ stable_debt_token_address=(
+ bytes_to_hex_str(rr.stable_debt_token_address) if rr.stable_debt_token_address else None
+ ),
+ stable_debt_token_symbol=rr.stable_debt_token_symbol,
+ stable_debt_token_decimals=rr.stable_debt_token_decimals,
+ variable_debt_token_address=(
+ bytes_to_hex_str(rr.variable_debt_token_address) if rr.variable_debt_token_address else None
+ ),
+ variable_debt_token_symbol=rr.variable_debt_token_symbol,
+ variable_debt_token_decimals=rr.variable_debt_token_decimals,
+ interest_rate_strategy_address=(
+ bytes_to_hex_str(rr.interest_rate_strategy_address) if rr.interest_rate_strategy_address else None
+ ),
+ block_number=rr.block_number,
+ block_timestamp=rr.block_timestamp,
+ transaction_hash=bytes_to_hex_str(rr.transaction_hash) if rr.transaction_hash else None,
+ log_index=rr.log_index,
+ )
+ self.reserve_dic[item.asset] = item
+ self.vary_debt_reserve_dic[item.variable_debt_token_address] = item
+ self.stable_debt_reserve_dic[item.stable_debt_token_address] = item
+ self.a_token_reserve_dic[item.a_token_address] = item
+
+ def _initialize_events_and_processors(self):
+ processors = [
+ ReserveInitProcessor(RESERVE_INITIALIZED_EVENT, AaveV2ReserveD, multicall_helper=self.multicall_helper),
+ DepositProcessor(DEPOSIT_EVENT, AaveV2DepositD),
+ WithdrawProcessor(WITHDRAW_EVENT, AaveV2WithdrawD),
+ BorrowProcessor(BORROW_EVENT, AaveV2BorrowD),
+ RepayProcessor(REPAY_EVENT, AaveV2RepayD),
+ FlashLoanProcessor(FLUSH_LOAN_EVENT, AaveV2FlashLoanD),
+ LiquidationCallProcessor(LIQUIDATION_CALL_EVENT, AaveV2LiquidationCallD),
+ ReserveDataUpdateProcessor(RESERVE_DATA_UPDATED_EVENT, AaveV2ReserveDataD),
+ TransferProcessor(TRANSFER_EVENT, AaveV2TransferD),
+ ]
+ self._event_processors = {p.event.get_signature(): p for p in processors}
+
+ def get_filter(self):
+ topics = list(self._event_processors.keys())
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ addresses=list(self.address_set),
+ topics=topics,
+ ),
+ ]
+ )
+
+ def is_aave_v2_address(self, address):
+ return address in self.address_set
+
+ def update_address_current(self, aave_records, address_token_block_balance_dic):
+ def nested_dict():
+ return defaultdict(aave_v2_address_current_factory)
+
+ res_d = defaultdict(nested_dict)
+ for a_record in aave_records:
+ if a_record.type() == AaveV2WithdrawD.type() or a_record.type() == AaveV2DepositD.type():
+ reserve = self.reserve_dic[a_record.reserve]
+ address = a_record.aave_user
+ after = address_token_block_balance_dic[address][reserve.a_token_address][a_record.block_number]
+
+ res_d[address][reserve.asset].address = a_record.aave_user
+ res_d[address][reserve.asset].asset = reserve.asset
+ res_d[address][reserve.asset].block_number = a_record.block_number
+ res_d[address][reserve.asset].block_timestamp = a_record.block_timestamp
+ res_d[address][reserve.asset].supply_amount = after
+
+ elif a_record.type() == AaveV2TransferD.type():
+ # a_token
+ reserve = self.a_token_reserve_dic.get(a_record.a_token)
+ if reserve:
+ aave_from = a_record.aave_from
+ after_transfer = address_token_block_balance_dic[aave_from][reserve.a_token_address][
+ a_record.block_number
+ ]
+ res_d[aave_from][reserve.asset].address = aave_from
+ res_d[aave_from][reserve.asset].asset = reserve.asset
+ res_d[aave_from][reserve.asset].block_number = a_record.block_number
+ res_d[aave_from][reserve.asset].block_timestamp = a_record.block_timestamp
+ res_d[aave_from][reserve.asset].supply_amount = after_transfer
+
+ aave_to = a_record.aave_to
+ after_receive = address_token_block_balance_dic[aave_to][reserve.a_token_address][
+ a_record.block_number
+ ]
+ res_d[aave_to][reserve.asset].address = aave_to
+ res_d[aave_to][reserve.asset].asset = reserve.asset
+ res_d[aave_to][reserve.asset].block_number = a_record.block_number
+ res_d[aave_to][reserve.asset].block_timestamp = a_record.block_timestamp
+ res_d[aave_to][reserve.asset].supply_amount = after_receive
+ else:
+ reserve = self.vary_debt_reserve_dic.get(a_record.a_token)
+ if reserve:
+ aave_from = a_record.aave_from
+ after_transfer = address_token_block_balance_dic[aave_from][
+ reserve.variable_debt_token_address
+ ][a_record.block_number]
+ aave_to = a_record.aave_to
+ after_receive = address_token_block_balance_dic[aave_to][reserve.variable_debt_token_address][
+ a_record.block_number
+ ]
+ else:
+ reserve = self.stable_debt_reserve_dic[a_record.a_token]
+ aave_from = a_record.aave_from
+ after_transfer = address_token_block_balance_dic[aave_from][reserve.stable_debt_token_address][
+ a_record.block_number
+ ]
+ aave_to = a_record.aave_to
+ after_receive = address_token_block_balance_dic[aave_to][reserve.stable_debt_token_address][
+ a_record.block_number
+ ]
+
+ res_d[aave_from][reserve.asset].address = aave_from
+ res_d[aave_from][reserve.asset].asset = reserve.asset
+ res_d[aave_from][reserve.asset].block_number = a_record.block_number
+ res_d[aave_from][reserve.asset].block_timestamp = a_record.block_timestamp
+ res_d[aave_from][reserve.asset].borrow_amount = after_transfer
+
+ res_d[aave_to][reserve.asset].address = aave_to
+ res_d[aave_to][reserve.asset].asset = reserve.asset
+ res_d[aave_to][reserve.asset].block_number = a_record.block_number
+ res_d[aave_to][reserve.asset].block_timestamp = a_record.block_timestamp
+ res_d[aave_to][reserve.asset].borrow_amount = after_receive
+
+ elif a_record.type() == AaveV2RepayD.type() or a_record.type() == AaveV2BorrowD.type():
+ address = a_record.aave_user
+ reserve = self.reserve_dic[a_record.reserve]
+ block_number = a_record.block_number
+
+ vary_token = reserve.variable_debt_token_address
+ vary_debt = 0
+ if address in address_token_block_balance_dic:
+ if vary_token in address_token_block_balance_dic[address]:
+ vary_debt = address_token_block_balance_dic[address][vary_token][block_number]
+ stable_token = reserve.stable_debt_token_address
+ stable_debt = 0
+ if address in address_token_block_balance_dic:
+ if stable_token in address_token_block_balance_dic[address]:
+ stable_debt = address_token_block_balance_dic[address][stable_token][block_number]
+
+ res_d[address][reserve.asset].asset = reserve.asset
+ res_d[address][reserve.asset].address = address
+ res_d[address][reserve.asset].borrow_amount = stable_debt + vary_debt
+ res_d[address][reserve.asset].block_number = a_record.block_number
+ res_d[address][reserve.asset].block_timestamp = a_record.block_timestamp
+ elif a_record.type() == AaveV2LiquidationCallD.type():
+ address = a_record.aave_user
+ block_number = a_record.block_number
+
+ debt_asset = a_record.debt_asset
+ debt_reserve = self.reserve_dic[debt_asset]
+ vary_token = debt_reserve.variable_debt_token_address
+ vary_debt = 0
+ if address in address_token_block_balance_dic:
+ if vary_token in address_token_block_balance_dic[address]:
+ vary_debt = address_token_block_balance_dic[address][vary_token][block_number]
+ stable_token = debt_reserve.stable_debt_token_address
+ stable_debt = 0
+ if address in address_token_block_balance_dic:
+ if stable_token in address_token_block_balance_dic[address]:
+ stable_debt = address_token_block_balance_dic[address][stable_token][block_number]
+ a_record.debt_after_liquidation = vary_debt + stable_debt
+
+ collateral_asset = a_record.collateral_asset
+ collateral_reserve = self.reserve_dic[collateral_asset]
+
+ a_record.collateral_after_liquidation = address_token_block_balance_dic[address][
+ collateral_reserve.a_token_address
+ ][block_number]
+
+ res_d[address][collateral_asset].asset = collateral_asset
+ res_d[address][collateral_asset].address = address
+ res_d[address][collateral_asset].supply_amount = a_record.collateral_after_liquidation
+ res_d[address][collateral_asset].block_number = a_record.block_number
+ res_d[address][collateral_asset].block_timestamp = a_record.block_timestamp
+
+ res_d[address][debt_asset].asset = debt_asset
+ res_d[address][debt_asset].address = address
+ res_d[address][debt_asset].borrow_amount = a_record.debt_after_liquidation
+ res_d[address][debt_asset].block_number = a_record.block_number
+ res_d[address][debt_asset].block_timestamp = a_record.block_timestamp
+
+ # record last liquidation time and amount
+ self._collect_item(
+ AaveV2LiquidationAddressCurrentD.type(),
+ AaveV2LiquidationAddressCurrentD(
+ address=address,
+ asset=collateral_asset,
+ last_liquidation_time=a_record.block_timestamp,
+ last_total_value_of_liquidation=a_record.liquidated_collateral_amount,
+ block_number=a_record.block_number,
+ ),
+ )
+ return res_d
+
+ def _collect(self, **kwargs):
+ logs = self._data_buff[Log.type()]
+ aave_records = []
+ for log in logs:
+ if not self.is_aave_v2_address(log.address):
+ continue
+ try:
+ processor = self._event_processors.get(log.topic0)
+ if processor is None:
+ continue
+ processed_data = processor.process(log)
+
+ if processed_data.type() == AaveV2ReserveD.type():
+ # update reserve
+ self.reserve_dic[processed_data.asset] = processed_data
+ self.a_token_reserve_dic[processed_data.a_token_address] = processed_data
+ elif processed_data.type() == AaveV2ReserveDataD.type():
+ self._collect_item(
+ AaveV2ReserveDataCurrentD.type(),
+ AaveV2ReserveDataCurrentD(
+ asset=processed_data.asset,
+ block_number=processed_data.block_number,
+ block_timestamp=processed_data.block_timestamp,
+ liquidity_rate=processed_data.liquidity_rate,
+ stable_borrow_rate=processed_data.stable_borrow_rate,
+ variable_borrow_rate=processed_data.variable_borrow_rate,
+ liquidity_index=processed_data.liquidity_index,
+ variable_borrow_index=processed_data.variable_borrow_index,
+ ),
+ )
+ self._collect_item(processed_data.type(), processed_data)
+ aave_records.append(processed_data)
+ except Exception as e:
+ logger.error(f"Error processing log {log.log_index} " f"in tx {log.transaction_hash}: {str(e)}")
+ raise FastShutdownError(f"Error processing log {log.log_index} " f"in tx {log.transaction_hash}")
+
+ address_token_block_balance_dic = self._enrich_records(aave_records)
+ res_d = self.update_address_current(aave_records, address_token_block_balance_dic)
+ address_currents = []
+ for address, outer_dic in res_d.items():
+ for reserve, kad in outer_dic.items():
+ address_currents.append(kad)
+ self._collect_items(AaveV2AddressCurrentD.type(), address_currents)
+ merge_dataclasses(self, AaveV2ReserveDataD, ["asset"])
+ merge_dataclasses(self, AaveV2ReserveDataCurrentD, ["asset"])
+ merge_dataclasses(self, AaveV2LiquidationAddressCurrentD, ["address", "asset"])
+
+ logger.info("This batch of data have processed")
+
+ def _enrich_records(self, aave_records):
+ eth_call_lis = []
+ for a_record in aave_records:
+ if a_record.type() == AaveV2DepositD.type():
+ reserve = self.reserve_dic[a_record.reserve]
+ eth_call_lis.append(
+ Call(
+ target=reserve.a_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+ elif a_record.type() == AaveV2WithdrawD.type():
+ reserve = self.reserve_dic[a_record.reserve]
+ eth_call_lis.append(
+ Call(
+ target=reserve.a_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+ elif a_record.type() == AaveV2TransferD.type():
+ if a_record.a_token in self.a_token_reserve_dic or a_record.a_token in self.vary_debt_reserve_dic:
+ a_token_address = a_record.a_token
+ eth_call_lis.append(
+ Call(
+ target=a_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_from],
+ block_number=a_record.block_number,
+ )
+ )
+ eth_call_lis.append(
+ Call(
+ target=a_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_to],
+ block_number=a_record.block_number,
+ )
+ )
+ elif a_record.a_token in self.stable_debt_reserve_dic:
+ a_token_address = a_record.a_token
+ eth_call_lis.append(
+ Call(
+ target=a_token_address,
+ function_abi=PRINCIPAL_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_from],
+ block_number=a_record.block_number,
+ )
+ )
+ eth_call_lis.append(
+ Call(
+ target=a_token_address,
+ function_abi=PRINCIPAL_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_to],
+ block_number=a_record.block_number,
+ )
+ )
+ elif a_record.type() == AaveV2RepayD.type() or a_record.type() == AaveV2BorrowD.type():
+ reserve = self.reserve_dic[a_record.reserve]
+ eth_call_lis.append(
+ Call(
+ target=reserve.stable_debt_token_address,
+ function_abi=PRINCIPAL_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+ eth_call_lis.append(
+ Call(
+ target=reserve.variable_debt_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[a_record.aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+
+ elif a_record.type() == AaveV2LiquidationCallD.type():
+ aave_user = a_record.aave_user
+ collateral_asset = a_record.collateral_asset
+ collateral_reserve = self.reserve_dic[collateral_asset]
+ eth_call_lis.append(
+ Call(
+ target=collateral_reserve.a_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+ debt_asset = a_record.debt_asset
+ debt_reserve = self.reserve_dic[debt_asset]
+
+ eth_call_lis.append(
+ Call(
+ target=debt_reserve.stable_debt_token_address,
+ function_abi=PRINCIPAL_BALANCE_OF_FUNCTION,
+ parameters=[aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+ eth_call_lis.append(
+ Call(
+ target=debt_reserve.variable_debt_token_address,
+ function_abi=SCALED_BALANCE_OF_FUNCTION,
+ parameters=[aave_user],
+ block_number=a_record.block_number,
+ )
+ )
+
+ enriched_eth_call_lis = self.multicall_helper.execute_calls(eth_call_lis)
+
+ address_token_block_balance_dic = {}
+
+ unique_set = set()
+ for cl in enriched_eth_call_lis:
+ k = (
+ cl.target.lower(),
+ cl.block_number,
+ cl.function_abi.get_name(),
+ ",".join(cl.parameters if cl.parameters else ""),
+ )
+ if k in unique_set:
+ continue
+ unique_set.add(k)
+ self._collect_item(
+ AaveV2CallRecordsD.type(),
+ AaveV2CallRecordsD(
+ target=cl.target.lower(),
+ params=",".join(cl.parameters) if cl.parameters else "",
+ function=cl.function_abi.get_name(),
+ block_number=cl.block_number,
+ result=str(cl.returns),
+ ),
+ )
+ token = cl.target.lower()
+ block_number = cl.block_number
+
+ address = cl.parameters[0]
+ if address not in address_token_block_balance_dic:
+ address_token_block_balance_dic[address] = dict()
+ if token not in address_token_block_balance_dic[address]:
+ address_token_block_balance_dic[address][token] = dict()
+ if cl.returns is None:
+ address_token_block_balance_dic[address][token][block_number] = 0
+ else:
+ address_token_block_balance_dic[address][token][block_number] = cl.returns["balance"]
+ return address_token_block_balance_dic
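
Note: `_enrich_records` returns a three-level dict of post-event balances keyed by address, token contract, and block. An illustrative sketch of the shape (all values hypothetical):

```python
# address -> token (lower-cased aToken / debt token) -> block -> balance
address_token_block_balance_dic = {
    "0xuser": {
        "0xatoken": {
            19_000_000: 12_345,  # scaledBalanceOf at that block
        },
    },
}

balance = address_token_block_balance_dic["0xuser"]["0xatoken"][19_000_000]
```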
diff --git a/indexer/modules/custom/__init__.py b/hemera_udf/aave_v2/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/__init__.py
rename to hemera_udf/aave_v2/models/__init__.py
diff --git a/hemera_udf/aave_v2/models/aave_v2_address_current.py b/hemera_udf/aave_v2/models/aave_v2_address_current.py
new file mode 100644
index 000000000..0de025383
--- /dev/null
+++ b/hemera_udf/aave_v2/models/aave_v2_address_current.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/10/22 11:33
+# @Author will
+
+from sqlalchemy import BOOLEAN, INT, NUMERIC, Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT
+from sqlalchemy.dialects.postgresql import BYTEA, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.aave_v2.domains.aave_v2_domain import AaveV2AddressCurrentD, AaveV2LiquidationAddressCurrentD
+
+
+class AaveV2AddressCurrent(HemeraModel):
+ __tablename__ = "af_aave_v2_address_current"
+ address = Column(BYTEA, primary_key=True)
+ asset = Column(BYTEA, primary_key=True)
+
+ supply_amount = Column(NUMERIC(100))
+ borrow_amount = Column(NUMERIC(100))
+ borrow_rate_mode = Column(INT)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+ last_total_value_of_liquidation = Column(NUMERIC(100))
+ last_liquidation_time = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("address", "asset"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AaveV2AddressCurrentD,
+ "conflict_do_update": True,
+ "update_strategy": "EXCLUDED.block_number >= af_aave_v2_address_current.block_number",
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2LiquidationAddressCurrentD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
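
Note: the `update_strategy` string above acts as a guard on the upsert, so only rows at an equal or newer block overwrite the current position. Roughly the Postgres statement implied for `AaveV2AddressCurrentD` (a sketch with columns trimmed; the real SQL is generated by Hemera's exporter):

```python
UPSERT_SQL = """
INSERT INTO af_aave_v2_address_current (address, asset, supply_amount, block_number)
VALUES (%(address)s, %(asset)s, %(supply_amount)s, %(block_number)s)
ON CONFLICT (address, asset) DO UPDATE
SET supply_amount = EXCLUDED.supply_amount,
    block_number  = EXCLUDED.block_number
WHERE EXCLUDED.block_number >= af_aave_v2_address_current.block_number;
"""
```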
diff --git a/hemera_udf/aave_v2/models/aave_v2_call_records.py b/hemera_udf/aave_v2/models/aave_v2_call_records.py
new file mode 100644
index 000000000..38c0849ea
--- /dev/null
+++ b/hemera_udf/aave_v2/models/aave_v2_call_records.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/10/25 16:12
+# @Author will
+# @Brief
+from sqlalchemy import BOOLEAN, VARCHAR, Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT
+from sqlalchemy.dialects.postgresql import BYTEA, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.aave_v2.domains.aave_v2_domain import AaveV2CallRecordsD
+
+
+class AaveV2CallRecords(HemeraModel):
+ __tablename__ = "af_aave_v2_call_records"
+ target = Column(BYTEA, primary_key=True)
+ params = Column(VARCHAR, primary_key=True)
+ function = Column(VARCHAR, primary_key=True)
+ block_number = Column(BIGINT, primary_key=True)
+ result = Column(VARCHAR)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("target", "params", "function", "block_number"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AaveV2CallRecordsD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
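
Note: `af_aave_v2_call_records` keys rows on (target, params, function, block_number), which is why `export_aave_v2_job.py` dedups calls on exactly that tuple before collecting them. A self-contained sketch of the dedup (hypothetical data):

```python
def call_record_key(target, block_number, function_name, params):
    # Mirrors the table's composite primary key, so in-batch duplicates
    # collapse to a single row before the upsert.
    return (target.lower(), block_number, function_name, ",".join(params))


seen, deduped = set(), []
for call in [("0xToKeN", 100, "scaledBalanceOf", ["0xuser"])] * 2:  # duplicate on purpose
    key = call_record_key(*call)
    if key not in seen:
        seen.add(key)
        deduped.append(call)

assert len(deduped) == 1
```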
diff --git a/hemera_udf/aave_v2/models/aave_v2_events.py b/hemera_udf/aave_v2/models/aave_v2_events.py
new file mode 100644
index 000000000..ca1210eac
--- /dev/null
+++ b/hemera_udf/aave_v2/models/aave_v2_events.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/10/22 14:24
+# @Author will
+# @File aave_v2_events.py
+# @Brief
+from sqlalchemy import BIGINT, BOOLEAN, INT, INTEGER, NUMERIC, VARCHAR, Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BYTEA, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.aave_v2.domains.aave_v2_domain import (
+ AaveV2BorrowD,
+ AaveV2DepositD,
+ AaveV2FlashLoanD,
+ AaveV2LiquidationCallD,
+ AaveV2RepayD,
+ AaveV2TransferD,
+ AaveV2WithdrawD,
+)
+
+
+class AaveV2Events(HemeraModel):
+ __tablename__ = "af_aave_v2_events"
+
+ transaction_hash = Column(BYTEA, primary_key=True)
+ log_index = Column(INTEGER, primary_key=True)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+ event_name = Column(VARCHAR)
+ topic0 = Column(BYTEA)
+
+ reserve = Column(BYTEA)
+ a_token = Column(BYTEA)
+ aave_user = Column(BYTEA)
+ repayer = Column(BYTEA)
+ amount = Column(NUMERIC(100))
+ premium = Column(NUMERIC(100))
+ on_behalf_of = Column(BYTEA)
+ referral = Column(INT)
+ borrow_rate_mode = Column(INT)
+ borrow_rate = Column(NUMERIC(100))
+ aave_from = Column(BYTEA)
+ aave_to = Column(BYTEA)
+ collateral_asset = Column(BYTEA)
+ debt_asset = Column(BYTEA)
+ debt_to_cover = Column(NUMERIC(100))
+ liquidated_collateral_amount = Column(NUMERIC(100))
+ liquidator = Column(BYTEA)
+ receive_atoken = Column(BOOLEAN)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (
+ PrimaryKeyConstraint(
+ "transaction_hash",
+ "log_index",
+ ),
+ )
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AaveV2DepositD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2WithdrawD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2BorrowD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2RepayD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2FlashLoanD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2LiquidationCallD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2TransferD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
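
Note: all seven event domains land in the single wide `af_aave_v2_events` table keyed by (transaction_hash, log_index); columns a given domain does not set stay NULL. An illustrative row for a Deposit (values hypothetical):

```python
deposit_row = {
    "transaction_hash": b"\x12" * 32,
    "log_index": 7,
    "event_name": "Deposit",
    "reserve": b"\xaa" * 20,
    "aave_user": b"\xbb" * 20,
    "amount": 10**18,
    # liquidator, collateral_asset, premium, ... remain NULL for this event
}
```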
diff --git a/hemera_udf/aave_v2/models/aave_v2_reserve.py b/hemera_udf/aave_v2/models/aave_v2_reserve.py
new file mode 100644
index 000000000..127f1dff1
--- /dev/null
+++ b/hemera_udf/aave_v2/models/aave_v2_reserve.py
@@ -0,0 +1,59 @@
+from sqlalchemy import NUMERIC, VARCHAR, Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.aave_v2.domains.aave_v2_domain import AaveV2ReserveD, AaveV2ReserveDataCurrentD
+
+
+class AaveV2Reserve(HemeraModel):
+ __tablename__ = "af_aave_v2_reserve"
+ asset = Column(BYTEA, primary_key=True)
+ asset_decimals = Column(NUMERIC(100))
+ asset_symbol = Column(VARCHAR)
+
+ a_token_address = Column(BYTEA)
+ a_token_decimals = Column(NUMERIC(100))
+ a_token_symbol = Column(VARCHAR)
+
+ stable_debt_token_address = Column(BYTEA)
+ stable_debt_token_decimals = Column(NUMERIC(100))
+ stable_debt_token_symbol = Column(VARCHAR)
+
+ variable_debt_token_address = Column(BYTEA)
+ variable_debt_token_decimals = Column(NUMERIC(100))
+ variable_debt_token_symbol = Column(VARCHAR)
+
+ interest_rate_strategy_address = Column(BYTEA)
+
+ liquidity_rate = Column(NUMERIC(100))
+ stable_borrow_rate = Column(NUMERIC(100))
+ variable_borrow_rate = Column(NUMERIC(100))
+ liquidity_index = Column(NUMERIC(100))
+ variable_borrow_index = Column(NUMERIC(100))
+
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+ transaction_hash = Column(BYTEA)
+ log_index = Column(INTEGER)
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("asset"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AaveV2ReserveD,
+ "conflict_do_update": True,
+ "update_strategy": "EXCLUDED.block_number >= af_aave_v2_reserve.block_number",
+ "converter": general_converter,
+ },
+ {
+ "domain": AaveV2ReserveDataCurrentD,
+ "conflict_do_update": True,
+ "update_strategy": "EXCLUDED.block_number >= af_aave_v2_reserve.block_number",
+ "converter": general_converter,
+ },
+ ]
diff --git a/hemera_udf/aave_v2/models/aave_v2_reserve_rates_records.py b/hemera_udf/aave_v2/models/aave_v2_reserve_rates_records.py
new file mode 100644
index 000000000..7647df47e
--- /dev/null
+++ b/hemera_udf/aave_v2/models/aave_v2_reserve_rates_records.py
@@ -0,0 +1,37 @@
+from sqlalchemy import NUMERIC, Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.aave_v2.domains.aave_v2_domain import AaveV2ReserveDataD
+
+
+class AaveV2ReserveRates(HemeraModel):
+ __tablename__ = "af_aave_v2_reserve_rates"
+ asset = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT, primary_key=True)
+
+ liquidity_rate = Column(NUMERIC(100))
+ stable_borrow_rate = Column(NUMERIC(100))
+ variable_borrow_rate = Column(NUMERIC(100))
+ liquidity_index = Column(NUMERIC(100))
+ variable_borrow_index = Column(NUMERIC(100))
+
+ block_timestamp = Column(TIMESTAMP)
+ transaction_hash = Column(BYTEA)
+ log_index = Column(INTEGER)
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("asset", "block_number"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": AaveV2ReserveDataD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
diff --git a/indexer/modules/custom/address_index/endpoint/__init__.py b/hemera_udf/aci_features/__init__.py
similarity index 100%
rename from indexer/modules/custom/address_index/endpoint/__init__.py
rename to hemera_udf/aci_features/__init__.py
diff --git a/common/models/all_features_value_records.py b/hemera_udf/aci_features/all_features_value_records.py
similarity index 85%
rename from common/models/all_features_value_records.py
rename to hemera_udf/aci_features/all_features_value_records.py
index 6f86f2936..fa36b66f4 100644
--- a/common/models/all_features_value_records.py
+++ b/hemera_udf/aci_features/all_features_value_records.py
@@ -1,14 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, JSONB, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.all_features_value_record import (
- AllFeatureValueRecordBlueChipHolders,
- AllFeatureValueRecordTraitsActiveness,
- AllFeatureValueRecordUniswapV2Info,
- AllFeatureValueRecordUniswapV3Pool,
- AllFeatureValueRecordUniswapV3Token,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.aci_features.domains import *
class AllFeatureValueRecords(HemeraModel):
diff --git a/indexer/modules/custom/common_utils.py b/hemera_udf/aci_features/common_utils.py
similarity index 77%
rename from indexer/modules/custom/common_utils.py
rename to hemera_udf/aci_features/common_utils.py
index 1da4d1f12..782756e53 100644
--- a/indexer/modules/custom/common_utils.py
+++ b/hemera_udf/aci_features/common_utils.py
@@ -1,16 +1,10 @@
-import json
import logging
from typing import cast
-import eth_abi
-from web3 import Web3
from web3.types import ABIFunction
-from common.utils.abi_code_utils import encode_data
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.utils.abi import function_abi_to_4byte_selector_str
-from indexer.utils.json_rpc_requests import generate_eth_call_json_rpc
-from indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+from hemera.common.utils.abi_code_utils import encode_data
+from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/all_features_value_record.py b/hemera_udf/aci_features/domains.py
similarity index 92%
rename from indexer/modules/custom/all_features_value_record.py
rename to hemera_udf/aci_features/domains.py
index 1ec2a828a..71f7228c0 100644
--- a/indexer/modules/custom/all_features_value_record.py
+++ b/hemera_udf/aci_features/domains.py
@@ -1,11 +1,11 @@
from dataclasses import asdict, dataclass
from typing import Dict, Optional
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class AllFeatureValueRecord(FilterData):
+class AllFeatureValueRecord(Domain):
feature_id: int
block_number: int
address: str
@@ -17,7 +17,12 @@ def to_dict(self) -> Dict:
@dataclass
-class AllFeatureValueRecordUniswapV3Pool(AllFeatureValueRecord):
+class AllFeatureValueRecordTraitsActiveness(AllFeatureValueRecord):
+ pass
+
+
+@dataclass
+class AllFeatureValueRecordBlueChipHolders(AllFeatureValueRecord):
def __init__(
self,
feature_id: int,
@@ -30,7 +35,7 @@ def __init__(
@dataclass
-class AllFeatureValueRecordUniswapV3Token(AllFeatureValueRecord):
+class AllFeatureValueRecordUniswapV2Info(AllFeatureValueRecord):
def __init__(
self,
feature_id: int,
@@ -43,7 +48,7 @@ def __init__(
@dataclass
-class AllFeatureValueRecordUniswapV2Info(AllFeatureValueRecord):
+class AllFeatureValueRecordUniswapV3Pool(AllFeatureValueRecord):
def __init__(
self,
feature_id: int,
@@ -56,14 +61,7 @@ def __init__(
@dataclass
-class AllFeatureValueRecordTraitsActiveness(AllFeatureValueRecord):
- @classmethod
- def is_filter_data(cls):
- return False
-
-
-@dataclass
-class AllFeatureValueRecordBlueChipHolders(AllFeatureValueRecord):
+class AllFeatureValueRecordUniswapV3Token(AllFeatureValueRecord):
def __init__(
self,
feature_id: int,
diff --git a/indexer/modules/custom/feature_type.py b/hemera_udf/aci_features/feature_type.py
similarity index 100%
rename from indexer/modules/custom/feature_type.py
rename to hemera_udf/aci_features/feature_type.py
diff --git a/hemera_udf/address_index/__init__.py b/hemera_udf/address_index/__init__.py
new file mode 100644
index 000000000..e0ba40a2d
--- /dev/null
+++ b/hemera_udf/address_index/__init__.py
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token import Token
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.domains.transaction import Transaction
+from hemera_udf.address_index.domains import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("ADDRESS_INDEX")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ Block,
+ Transaction,
+ Log,
+ Token,
+ ERC20TokenTransfer,
+ ERC721TokenTransfer,
+ ERC1155TokenTransfer,
+ AddressNftTransfer,
+ AddressTokenHolder,
+ AddressTokenTransfer,
+ TokenAddressNftInventory,
+ AddressTransaction,
+ AddressNft1155Holder,
+ AddressContractOperation,
+ AddressInternalTransaction,
+ },
+)
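
Note: the double `packaging.version.parse` strips pre-release tags via `base_version` before the 1.0.0 gate, so e.g. a `1.1.0rc1` host still passes. A small check of that behavior:

```python
import packaging.version

v = packaging.version.parse("1.1.0rc1")
assert v.base_version == "1.1.0"  # pre-release suffix dropped
assert packaging.version.parse(v.base_version) >= packaging.version.parse("1.0.0")
```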
diff --git a/hemera_udf/address_index/domains.py b/hemera_udf/address_index/domains.py
new file mode 100644
index 000000000..5f926a408
--- /dev/null
+++ b/hemera_udf/address_index/domains.py
@@ -0,0 +1,128 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class AddressContractOperation(Domain):
+ address: str
+
+ trace_from_address: str
+ contract_address: str
+
+ trace_id: str
+ block_number: int
+ transaction_index: int
+ transaction_hash: str
+ block_timestamp: int
+ block_hash: str
+
+ error: str
+ status: int
+
+ creation_code: str
+ deployed_code: str
+
+ gas: int
+ gas_used: int
+
+ trace_type: str
+ call_type: str
+
+ transaction_receipt_status: int
+
+
+@dataclass
+class AddressInternalTransaction(Domain):
+ address: str
+
+ trace_id: str
+ block_number: int
+ transaction_index: int
+ transaction_hash: str
+ block_timestamp: int
+ block_hash: str
+
+ error: str
+ status: int
+
+ input_method: str
+
+ value: int
+ gas: int
+ gas_used: int
+
+ trace_type: str
+ call_type: str
+
+ txn_type: int
+ related_address: str
+
+ transaction_receipt_status: int
+
+
+@dataclass
+class AddressNft1155Holder(Domain):
+ address: str
+ token_address: str
+ token_id: int
+ balance_of: str
+
+
+@dataclass
+class AddressNftTransfer(Domain):
+ address: str
+ block_number: int
+ log_index: int
+ transaction_hash: str
+ block_timestamp: int
+ block_hash: str
+ token_address: str
+ related_address: str
+ transfer_type: int
+ token_id: int
+ value: int
+
+
+@dataclass
+class AddressTokenHolder(Domain):
+ address: str
+ token_address: str
+ balance_of: str
+
+
+@dataclass
+class AddressTokenTransfer(Domain):
+ address: str
+ block_number: int
+ log_index: int
+ transaction_hash: str
+ block_timestamp: int
+ block_hash: str
+ token_address: str
+ related_address: str
+ transfer_type: int
+ value: int
+
+
+@dataclass
+class AddressTransaction(Domain):
+ address: str
+ block_number: int
+ transaction_index: int
+ transaction_hash: str
+ block_timestamp: int
+ block_hash: str
+ txn_type: int
+ related_address: str
+ value: int
+ transaction_fee: int
+ receipt_status: int
+ method: str
+
+
+@dataclass
+class TokenAddressNftInventory(Domain):
+ token_address: str
+ token_id: int
+ wallet_address: str
diff --git a/indexer/modules/custom/address_index/__init__.py b/hemera_udf/address_index/endpoint/__init__.py
similarity index 84%
rename from indexer/modules/custom/address_index/__init__.py
rename to hemera_udf/address_index/endpoint/__init__.py
index 7771d334b..46e2e777b 100644
--- a/indexer/modules/custom/address_index/__init__.py
+++ b/hemera_udf/address_index/endpoint/__init__.py
@@ -1,5 +1,3 @@
-from functools import wraps
-
from flask_restx.namespace import Namespace
address_profile_namespace = Namespace(
diff --git a/indexer/modules/custom/address_index/endpoint/routes.py b/hemera_udf/address_index/endpoint/routes.py
similarity index 77%
rename from indexer/modules/custom/address_index/endpoint/routes.py
rename to hemera_udf/address_index/endpoint/routes.py
index b3be9d03c..bcb7fb25a 100644
--- a/indexer/modules/custom/address_index/endpoint/routes.py
+++ b/hemera_udf/address_index/endpoint/routes.py
@@ -3,25 +3,27 @@
import flask
from flask_restx import Resource
-from api.app.cache import cache
-from common.utils.format_utils import format_to_dict
-from indexer.modules.custom.address_index import address_profile_namespace
-from indexer.modules.custom.address_index.schemas.api import (
+from hemera.api.app.cache import cache
+from hemera.common.utils.format_utils import format_to_dict
+from hemera_udf.address_index.endpoint import address_profile_namespace
+from hemera_udf.address_index.schemas.api import (
aci_score_response_model,
- address_base_info_model,
address_base_info_response_model,
address_developer_info_response_model,
validate_eth_address,
)
-from indexer.modules.custom.address_index.utils.helpers import (
+from hemera_udf.address_index.utils.helpers import (
get_address_assets,
get_address_base_info,
get_address_developer_info,
+ get_all_udf_dashboards,
+ get_all_udf_dashboards_data,
get_contract_deployed_events,
get_contract_deployer_profile,
+ get_daily_active_address,
get_wallet_address_volumes,
)
-from indexer.modules.custom.address_index.utils.score import calculate_aci_score
+from hemera_udf.address_index.utils.score import calculate_aci_score
PAGE_SIZE = 10
MAX_TRANSACTION = 500000
@@ -128,3 +130,25 @@ class ACIVolumes(Resource):
@cache.cached(timeout=360, query_string=True)
def get(self, address):
address_bytes = bytes.fromhex(address[2:])
+
+
+@address_profile_namespace.route("/v1/aci/udf_dashboards")
+class UDFDashboards(Resource):
+ @cache.cached(timeout=360, query_string=True)
+ def get(self):
+ return get_all_udf_dashboards()
+
+
+@address_profile_namespace.route("/v1/aci/udf_dashboards_data")
+class UDFDashboardsData(Resource):
+ @cache.cached(timeout=360, query_string=True)
+ def get(self):
+ return get_all_udf_dashboards_data()
+
+
+@address_profile_namespace.route("/v1/aci/daily_active_address")
+class DailyAddress(Resource):
+ @cache.cached(timeout=360, query_string=True)
+ def get(self):
+ time_range = flask.request.args.get("time_range", "7d")
+ return get_daily_active_address(time_range)
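
Note: a hypothetical client call against the new endpoint (host, port, and mount prefix are assumptions; `time_range` falls back to "7d" when omitted):

```python
import requests

resp = requests.get(
    "http://localhost:8080/v1/aci/daily_active_address",  # assumed host/prefix
    params={"time_range": "30d"},
)
print(resp.json())
```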
diff --git a/indexer/modules/custom/address_index/models/__init__.py b/hemera_udf/address_index/exporters/__init__.py
similarity index 100%
rename from indexer/modules/custom/address_index/models/__init__.py
rename to hemera_udf/address_index/exporters/__init__.py
diff --git a/indexer/exporters/hemera_address_postgres_item_exporter.py b/hemera_udf/address_index/exporters/hemera_address_postgres_item_exporter.py
similarity index 83%
rename from indexer/exporters/hemera_address_postgres_item_exporter.py
rename to hemera_udf/address_index/exporters/hemera_address_postgres_item_exporter.py
index 59a8d4531..56ed9ecde 100644
--- a/indexer/exporters/hemera_address_postgres_item_exporter.py
+++ b/hemera_udf/address_index/exporters/hemera_address_postgres_item_exporter.py
@@ -5,15 +5,11 @@
from dateutil.tz import tzlocal
from psycopg2.extras import execute_values
-from common.converter.pg_converter import domain_model_mapping
-from common.models import HemeraModel
-from common.services.hemera_postgresql_service import HemeraPostgreSQLService
-from indexer.domain.token import Token
-from indexer.exporters.base_exporter import BaseExporter, group_by_item_type
-from indexer.modules.custom.address_index.domain import *
-from indexer.modules.custom.address_index.domain.address_contract_operation import AddressContractOperation
-from indexer.modules.custom.address_index.domain.address_internal_transaction import AddressInternalTransaction
-from indexer.modules.custom.address_index.domain.address_nft_1155_holders import AddressNft1155Holder
+from hemera.common.models import HemeraModel
+from hemera.common.services.hemera_postgresql_service import HemeraPostgreSQLService
+from hemera.indexer.domains.token import Token
+from hemera.indexer.exporters.base_exporter import BaseExporter, group_by_item_type
+from hemera_udf.address_index.domains import *
logger = logging.getLogger(__name__)
@@ -38,6 +34,9 @@ def __init__(self, output, chain_id):
service = HemeraPostgreSQLService(url)
self.service = service
self.chain_id = chain_id
+ from hemera.common.converter.pg_converter import domain_model_mapping
+
+ self._domain_model_mapping = domain_model_mapping
def export_items(self, items, **kwargs):
start_time = datetime.now(tzlocal())
@@ -52,7 +51,7 @@ def export_items(self, items, **kwargs):
item_group = items_grouped_by_type.get(item_type)
if item_group:
- pg_config = domain_model_mapping[item_type]
+ pg_config = self._domain_model_mapping[item_type]
table = pg_config["table"]
do_update = pg_config["conflict_do_update"]
diff --git a/indexer/modules/custom/address_index/schemas/__init__.py b/hemera_udf/address_index/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/address_index/schemas/__init__.py
rename to hemera_udf/address_index/jobs/__init__.py
diff --git a/indexer/modules/custom/address_index/address_index_job.py b/hemera_udf/address_index/jobs/address_index_job.py
similarity index 92%
rename from indexer/modules/custom/address_index/address_index_job.py
rename to hemera_udf/address_index/jobs/address_index_job.py
index deca4b0f5..5b2b45cb8 100644
--- a/indexer/modules/custom/address_index/address_index_job.py
+++ b/hemera_udf/address_index/jobs/address_index_job.py
@@ -3,25 +3,18 @@
from itertools import groupby
from typing import List, Union
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.domain.token_id_infos import UpdateERC721TokenIdDetail
-from indexer.domain.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import ExtensionJob
-from indexer.jobs.export_token_balances_job import extract_token_parameters
-from indexer.jobs.export_token_id_infos_job import generate_token_id_info
-from indexer.modules.custom.address_index.domain.address_contract_operation import AddressContractOperation
-from indexer.modules.custom.address_index.domain.address_internal_transaction import AddressInternalTransaction
-from indexer.modules.custom.address_index.domain.address_nft_1155_holders import AddressNft1155Holder
-from indexer.modules.custom.address_index.domain.address_nft_transfer import AddressNftTransfer
-from indexer.modules.custom.address_index.domain.address_token_holder import AddressTokenHolder
-from indexer.modules.custom.address_index.domain.address_token_transfer import AddressTokenTransfer
-from indexer.modules.custom.address_index.domain.address_transaction import AddressTransaction
-from indexer.modules.custom.address_index.domain.token_address_nft_inventory import TokenAddressNftInventory
-from indexer.utils.collection_utils import distinct_collections_by_group
-from indexer.utils.token_fetcher import TokenFetcher
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.domains.token_id_infos import UpdateERC721TokenIdDetail
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera.indexer.jobs.export_token_balances_job import extract_token_parameters
+from hemera.indexer.jobs.export_token_id_infos_job import generate_token_id_info
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera.indexer.utils.token_fetcher import TokenFetcher
+from hemera_udf.address_index.domains import *
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/address_index/utils/__init__.py b/hemera_udf/address_index/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/address_index/utils/__init__.py
rename to hemera_udf/address_index/models/__init__.py
diff --git a/indexer/modules/custom/address_index/models/address_contract_operation.py b/hemera_udf/address_index/models/address_contract_operation.py
similarity index 90%
rename from indexer/modules/custom/address_index/models/address_contract_operation.py
rename to hemera_udf/address_index/models/address_contract_operation.py
index 3e48fbab7..904acae06 100644
--- a/indexer/modules/custom/address_index/models/address_contract_operation.py
+++ b/hemera_udf/address_index/models/address_contract_operation.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain.address_contract_operation import AddressContractOperation
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressContractOperation
class AddressContractOperations(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_index_daily_stats.py b/hemera_udf/address_index/models/address_index_daily_stats.py
similarity index 83%
rename from indexer/modules/custom/address_index/models/address_index_daily_stats.py
rename to hemera_udf/address_index/models/address_index_daily_stats.py
index a610fda32..daf8ad64c 100644
--- a/indexer/modules/custom/address_index/models/address_index_daily_stats.py
+++ b/hemera_udf/address_index/models/address_index_daily_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, DATE, INTEGER, NUMERIC
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class AddressIndexDailyStats(HemeraModel):
@@ -14,9 +14,9 @@ class AddressIndexDailyStats(HemeraModel):
transaction_out_count = Column(INTEGER)
transaction_self_count = Column(INTEGER)
- transaction_in_value = Column(BIGINT)
- transaction_out_value = Column(BIGINT)
- transaction_self_value = Column(BIGINT)
+ transaction_in_value = Column(NUMERIC)
+ transaction_out_value = Column(NUMERIC)
+ transaction_self_value = Column(NUMERIC)
transaction_in_fee = Column(NUMERIC)
transaction_out_fee = Column(NUMERIC)
@@ -26,9 +26,9 @@ class AddressIndexDailyStats(HemeraModel):
internal_transaction_out_count = Column(INTEGER)
internal_transaction_self_count = Column(INTEGER)
- internal_transaction_in_value = Column(BIGINT)
- internal_transaction_out_value = Column(BIGINT)
- internal_transaction_self_value = Column(BIGINT)
+ internal_transaction_in_value = Column(NUMERIC)
+ internal_transaction_out_value = Column(NUMERIC)
+ internal_transaction_self_value = Column(NUMERIC)
erc20_transfer_in_count = Column(INTEGER)
erc20_transfer_out_count = Column(INTEGER)
diff --git a/indexer/modules/custom/address_index/models/address_index_stats.py b/hemera_udf/address_index/models/address_index_stats.py
similarity index 97%
rename from indexer/modules/custom/address_index/models/address_index_stats.py
rename to hemera_udf/address_index/models/address_index_stats.py
index 43a1815b4..2ff1e44af 100644
--- a/indexer/modules/custom/address_index/models/address_index_stats.py
+++ b/hemera_udf/address_index/models/address_index_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class AddressIndexStats(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_internal_transaciton.py b/hemera_udf/address_index/models/address_internal_transaciton.py
similarity index 90%
rename from indexer/modules/custom/address_index/models/address_internal_transaciton.py
rename to hemera_udf/address_index/models/address_internal_transaciton.py
index 1ffff36f5..a2c4c8ec5 100644
--- a/indexer/modules/custom/address_index/models/address_internal_transaciton.py
+++ b/hemera_udf/address_index/models/address_internal_transaciton.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TEXT, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain.address_internal_transaction import AddressInternalTransaction
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressInternalTransaction
class AddressInternalTransactions(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_nft_1155_holders.py b/hemera_udf/address_index/models/address_nft_1155_holders.py
similarity index 86%
rename from indexer/modules/custom/address_index/models/address_nft_1155_holders.py
rename to hemera_udf/address_index/models/address_nft_1155_holders.py
index 20e42c3ab..58cc926bc 100644
--- a/indexer/modules/custom/address_index/models/address_nft_1155_holders.py
+++ b/hemera_udf/address_index/models/address_nft_1155_holders.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func
from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain.address_nft_1155_holders import AddressNft1155Holder
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressNft1155Holder
class AddressNftTokenHolders(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_nft_transfers.py b/hemera_udf/address_index/models/address_nft_transfers.py
similarity index 89%
rename from indexer/modules/custom/address_index/models/address_nft_transfers.py
rename to hemera_udf/address_index/models/address_nft_transfers.py
index de2b9f684..a65dbf25f 100644
--- a/indexer/modules/custom/address_index/models/address_nft_transfers.py
+++ b/hemera_udf/address_index/models/address_nft_transfers.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain import AddressNftTransfer
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressNftTransfer
class AddressNftTransfers(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_token_holders.py b/hemera_udf/address_index/models/address_token_holders.py
similarity index 87%
rename from indexer/modules/custom/address_index/models/address_token_holders.py
rename to hemera_udf/address_index/models/address_token_holders.py
index 23df5b604..ac0ccd212 100644
--- a/indexer/modules/custom/address_index/models/address_token_holders.py
+++ b/hemera_udf/address_index/models/address_token_holders.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func
from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain import AddressTokenHolder
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressTokenHolder
class AddressTokenHolders(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_token_transfers.py b/hemera_udf/address_index/models/address_token_transfers.py
similarity index 88%
rename from indexer/modules/custom/address_index/models/address_token_transfers.py
rename to hemera_udf/address_index/models/address_token_transfers.py
index b1c7b94c6..369230b0b 100644
--- a/indexer/modules/custom/address_index/models/address_token_transfers.py
+++ b/hemera_udf/address_index/models/address_token_transfers.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain import AddressTokenTransfer
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressTokenTransfer
class AddressTokenTransfers(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/address_transactions.py b/hemera_udf/address_index/models/address_transactions.py
similarity index 92%
rename from indexer/modules/custom/address_index/models/address_transactions.py
rename to hemera_udf/address_index/models/address_transactions.py
index e8c8897b1..cf552ec96 100644
--- a/indexer/modules/custom/address_index/models/address_transactions.py
+++ b/hemera_udf/address_index/models/address_transactions.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TEXT, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain import AddressTransaction
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import AddressTransaction
class AddressTransactions(HemeraModel):
diff --git a/hemera_udf/address_index/models/dashboard_daily_address.py b/hemera_udf/address_index/models/dashboard_daily_address.py
new file mode 100644
index 000000000..77e41d8db
--- /dev/null
+++ b/hemera_udf/address_index/models/dashboard_daily_address.py
@@ -0,0 +1,14 @@
+from sqlalchemy import Column, Date, PrimaryKeyConstraint
+from sqlalchemy.dialects.postgresql import INTEGER
+
+from hemera.common.models import HemeraModel
+
+
+class AFDashboardDailyAddressStats(HemeraModel):
+ __tablename__ = "af_dashboard_daily_address_stats"
+
+ block_date = Column(Date, nullable=False, primary_key=True)
+ active_addresses = Column(INTEGER, nullable=False)
+ new_addresses = Column(INTEGER)
+
+ __table_args__ = (PrimaryKeyConstraint("block_date"),)
diff --git a/hemera_udf/address_index/models/distribution_daily_stats.py b/hemera_udf/address_index/models/distribution_daily_stats.py
new file mode 100644
index 000000000..bd0594209
--- /dev/null
+++ b/hemera_udf/address_index/models/distribution_daily_stats.py
@@ -0,0 +1,16 @@
+from sqlalchemy import VARCHAR, Column, Date, Double, Numeric, PrimaryKeyConstraint
+
+from hemera.common.models import HemeraModel
+
+
+class AFDistributionDailyStats(HemeraModel):
+ __tablename__ = "af_distribution_daily_stats"
+
+ distribution_name = Column(VARCHAR, nullable=False)
+ block_date = Column(Date, nullable=False)
+ x = Column(Numeric, nullable=False)
+ value = Column(Numeric)
+ percentage = Column(Double)
+ total_value = Column(Numeric)
+
+ __table_args__ = (PrimaryKeyConstraint("distribution_name", "block_date", "x"),)
diff --git a/hemera_udf/address_index/models/metrics_distribution_daily_stats.py b/hemera_udf/address_index/models/metrics_distribution_daily_stats.py
new file mode 100644
index 000000000..f2ebdccf7
--- /dev/null
+++ b/hemera_udf/address_index/models/metrics_distribution_daily_stats.py
@@ -0,0 +1,14 @@
+from sqlalchemy import VARCHAR, Column, Date, Double, Numeric, PrimaryKeyConstraint
+
+from hemera.common.models import HemeraModel
+
+
+class AFMetricsDistributionDailyStats(HemeraModel):
+ __tablename__ = "af_metrics_distribution_daily_stats"
+
+ distribution_name = Column(VARCHAR, nullable=False)
+ block_date = Column(Date, nullable=False)
+ avg = Column(Numeric)
+ stdev = Column(Numeric)
+
+ __table_args__ = (PrimaryKeyConstraint("distribution_name", "block_date"),)
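
Note: all three new stats tables key on composite primary keys declared through PrimaryKeyConstraint, so backfills can be made idempotent with a PostgreSQL upsert. A minimal sketch (the distribution name and values are illustrative; assumes the shared db.session from hemera.common.models is configured, as helpers.py below does):

    from datetime import date

    from sqlalchemy.dialects.postgresql import insert

    from hemera.common.models import db
    from hemera_udf.address_index.models.metrics_distribution_daily_stats import (
        AFMetricsDistributionDailyStats,
    )

    # Re-running a day is safe: a conflict on (distribution_name, block_date)
    # overwrites the metrics instead of raising a duplicate-key error.
    stmt = insert(AFMetricsDistributionDailyStats).values(
        distribution_name="distribution_job_ens_holdings_udf",
        block_date=date(2024, 1, 1),
        avg=12.5,
        stdev=3.2,
    )
    stmt = stmt.on_conflict_do_update(
        index_elements=["distribution_name", "block_date"],
        set_={"avg": stmt.excluded.avg, "stdev": stmt.excluded.stdev},
    )
    db.session.execute(stmt)
    db.session.commit()
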
diff --git a/indexer/modules/custom/address_index/models/scheduled_metadata.py b/hemera_udf/address_index/models/scheduled_metadata.py
similarity index 89%
rename from indexer/modules/custom/address_index/models/scheduled_metadata.py
rename to hemera_udf/address_index/models/scheduled_metadata.py
index 8e0c7bc3a..e4a317c8d 100644
--- a/indexer/modules/custom/address_index/models/scheduled_metadata.py
+++ b/hemera_udf/address_index/models/scheduled_metadata.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import INTEGER, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class ScheduledMetadata(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/token_address_index.py b/hemera_udf/address_index/models/token_address_index.py
similarity index 90%
rename from indexer/modules/custom/address_index/models/token_address_index.py
rename to hemera_udf/address_index/models/token_address_index.py
index c6f258e7e..dced1d7b9 100644
--- a/indexer/modules/custom/address_index/models/token_address_index.py
+++ b/hemera_udf/address_index/models/token_address_index.py
@@ -1,7 +1,7 @@
from sqlalchemy import DATE, Column, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class TokenAddressIndexStats(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/token_address_index_daily_stats.py b/hemera_udf/address_index/models/token_address_index_daily_stats.py
similarity index 90%
rename from indexer/modules/custom/address_index/models/token_address_index_daily_stats.py
rename to hemera_udf/address_index/models/token_address_index_daily_stats.py
index 5e02d8795..0ae74f2d2 100644
--- a/indexer/modules/custom/address_index/models/token_address_index_daily_stats.py
+++ b/hemera_udf/address_index/models/token_address_index_daily_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, TIMESTAMP
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class TokenAddressIndexStats(HemeraModel):
diff --git a/indexer/modules/custom/address_index/models/token_address_nft_inventories.py b/hemera_udf/address_index/models/token_address_nft_inventories.py
similarity index 88%
rename from indexer/modules/custom/address_index/models/token_address_nft_inventories.py
rename to hemera_udf/address_index/models/token_address_nft_inventories.py
index 333108710..e0df281df 100644
--- a/indexer/modules/custom/address_index/models/token_address_nft_inventories.py
+++ b/hemera_udf/address_index/models/token_address_nft_inventories.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.address_index.domain import TokenAddressNftInventory
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.address_index.domains import TokenAddressNftInventory
class TokenAddressNftInventories(HemeraModel):
diff --git a/indexer/modules/custom/blue_chip/__init__.py b/hemera_udf/address_index/schemas/__init__.py
similarity index 100%
rename from indexer/modules/custom/blue_chip/__init__.py
rename to hemera_udf/address_index/schemas/__init__.py
diff --git a/indexer/modules/custom/address_index/schemas/api.py b/hemera_udf/address_index/schemas/api.py
similarity index 99%
rename from indexer/modules/custom/address_index/schemas/api.py
rename to hemera_udf/address_index/schemas/api.py
index 42b80d571..1aaaf30a4 100644
--- a/indexer/modules/custom/address_index/schemas/api.py
+++ b/hemera_udf/address_index/schemas/api.py
@@ -3,7 +3,7 @@
from flask import jsonify
from flask_restx import fields
-from api.app.address import address_features_namespace
+from hemera.api.app.address import address_features_namespace
def validate_eth_address(f):
diff --git a/indexer/modules/custom/blue_chip/domain/__init__.py b/hemera_udf/address_index/utils/__init__.py
similarity index 100%
rename from indexer/modules/custom/blue_chip/domain/__init__.py
rename to hemera_udf/address_index/utils/__init__.py
diff --git a/indexer/modules/custom/address_index/utils/helpers.py b/hemera_udf/address_index/utils/helpers.py
similarity index 78%
rename from indexer/modules/custom/address_index/utils/helpers.py
rename to hemera_udf/address_index/utils/helpers.py
index 9816ac8c9..4b8f3f65c 100644
--- a/indexer/modules/custom/address_index/utils/helpers.py
+++ b/hemera_udf/address_index/utils/helpers.py
@@ -1,43 +1,52 @@
-import binascii
import copy
+import math
+import re
from datetime import date, datetime, timedelta
-from select import select
-from typing import Any, Dict, List, Optional, Tuple, Union
-
-from sqlalchemy import and_, func
-
-from api.app.address.features import register_feature
-from api.app.address.models import AddressBaseProfile
-from api.app.contract.contract_verify import get_names_from_method_or_topic_list
-from api.app.db_service.contracts import get_contracts_by_addresses
-from api.app.db_service.wallet_addresses import get_token_txn_cnt_by_address
-from api.app.utils.fill_info import fill_address_display_to_transactions, fill_is_contract_to_transactions
-from api.app.utils.format_utils import format_coin_value, format_transaction
-from api.app.utils.token_utils import get_coin_prices, get_latest_coin_prices, get_token_price
-from api.app.utils.web3_utils import get_balance
-from common.models import db
-from common.models.contracts import Contracts
-from common.models.scheduled_metadata import ScheduledMetadata
-from common.models.tokens import Tokens
-from common.utils.db_utils import app_config, build_entities
-from common.utils.exception_control import APIError
-from common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, format_value_for_json, hex_str_to_bytes
-from common.utils.web3_utils import ZERO_ADDRESS
-from enumeration.token_type import TokenType
-from indexer.modules.custom.address_index.address_index_job import (
+from typing import Any, Dict, List, Optional, Union
+
+from sqlalchemy import and_, distinct, func, or_
+
+from hemera.api.app.address.features import register_feature
+from hemera.api.app.address.models import AddressBaseProfile
+from hemera.api.app.contract.contract_verify import get_names_from_method_or_topic_list
+from hemera.api.app.db_service.contracts import get_contracts_by_addresses
+from hemera.api.app.db_service.wallet_addresses import get_token_txn_cnt_by_address
+from hemera.api.app.utils.fill_info import fill_address_display_to_transactions, fill_is_contract_to_transactions
+from hemera.api.app.utils.format_utils import format_coin_value
+from hemera.api.app.utils.token_utils import get_coin_prices, get_latest_coin_prices, get_token_price
+from hemera.api.app.utils.web3_utils import get_balance
+from hemera.common.enumeration.token_type import TokenType
+from hemera.common.models import db
+from hemera.common.models.contracts import Contracts
+from hemera.common.models.scheduled_metadata import ScheduledMetadata
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.db_utils import app_config, build_entities
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import (
+ as_dict,
+ bytes_to_hex_str,
+ format_to_dict,
+ format_value_for_json,
+ hex_str_to_bytes,
+)
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera_udf.address_index.jobs.address_index_job import (
AddressTokenTransferType,
AddressTransactionType,
InternalTransactionType,
)
-from indexer.modules.custom.address_index.models.address_contract_operation import AddressContractOperations
-from indexer.modules.custom.address_index.models.address_index_daily_stats import AddressIndexDailyStats
-from indexer.modules.custom.address_index.models.address_internal_transaciton import AddressInternalTransactions
-from indexer.modules.custom.address_index.models.address_nft_1155_holders import AddressNftTokenHolders
-from indexer.modules.custom.address_index.models.address_token_holders import AddressTokenHolders
-from indexer.modules.custom.address_index.models.address_token_transfers import AddressTokenTransfers
-from indexer.modules.custom.address_index.models.address_transactions import AddressTransactions
-from indexer.modules.custom.address_index.models.token_address_nft_inventories import TokenAddressNftInventories
-from indexer.modules.custom.address_index.schemas.api import address_base_info_model, filter_and_fill_dict_by_model
+from hemera_udf.address_index.models.address_contract_operation import AddressContractOperations
+from hemera_udf.address_index.models.address_index_daily_stats import AddressIndexDailyStats
+from hemera_udf.address_index.models.address_internal_transaciton import AddressInternalTransactions
+from hemera_udf.address_index.models.address_nft_1155_holders import AddressNftTokenHolders
+from hemera_udf.address_index.models.address_token_holders import AddressTokenHolders
+from hemera_udf.address_index.models.address_token_transfers import AddressTokenTransfers
+from hemera_udf.address_index.models.address_transactions import AddressTransactions
+from hemera_udf.address_index.models.dashboard_daily_address import AFDashboardDailyAddressStats
+from hemera_udf.address_index.models.distribution_daily_stats import AFDistributionDailyStats
+from hemera_udf.address_index.models.metrics_distribution_daily_stats import AFMetricsDistributionDailyStats
+from hemera_udf.address_index.models.token_address_nft_inventories import TokenAddressNftInventories
+from hemera_udf.address_index.schemas.api import address_base_info_model, filter_and_fill_dict_by_model
PAGE_SIZE = 10
MAX_TRANSACTION = 500000
@@ -1056,3 +1065,236 @@ def parse_address_transactions(transactions: list[AddressTransactions]):
).title()
return transaction_list
+
+
+time_ranges = {
+ "1d": lambda now: now - timedelta(days=1),
+ "7d": lambda now: now - timedelta(days=7),
+ "30d": lambda now: now - timedelta(days=30),
+ "6m": lambda now: now - timedelta(days=180),
+ "YTD": lambda now: datetime(now.year, 1, 1),
+ "1y": lambda now: now - timedelta(days=365),
+}
+
+
+def get_daily_active_address(time_range: str):
+ today = date.today()
+
+ start_date = time_ranges[time_range](today)
+ result = (
+ db.session.query(AFDashboardDailyAddressStats)
+ .filter(AFDashboardDailyAddressStats.block_date <= today, AFDashboardDailyAddressStats.block_date >= start_date)
+ .order_by(AFDashboardDailyAddressStats.block_date)
+ .all()
+ )
+ data = [
+ {
+ "block_date": record.block_date.isoformat(),
+ "active_addresses": record.active_addresses,
+ "new_addresses": record.new_addresses,
+ }
+ for record in result
+ ]
+
+ return {"time_range": time_range, "data": data}
+
+
+def get_all_udf_dashboards():
+ # the distinct distribution names double as the dashboard result keys
+ query = db.session.query(distinct(AFDistributionDailyStats.distribution_name)).order_by(
+ AFDistributionDailyStats.distribution_name
+ )
+ result = query.all()
+
+ distinct_distribution_names = [row[0] for row in result]
+
+ return distinct_distribution_names
+
+
+def fetch_and_group_metrics(today, week_ago, month_ago):
+ """Fetch metrics and group them by distribution name and date."""
+ metrics = (
+ db.session.query(AFMetricsDistributionDailyStats)
+ .filter(
+ or_(
+ AFMetricsDistributionDailyStats.block_date == today,
+ AFMetricsDistributionDailyStats.block_date == week_ago,
+ AFMetricsDistributionDailyStats.block_date == month_ago,
+ )
+ )
+ .order_by(AFMetricsDistributionDailyStats.distribution_name, AFMetricsDistributionDailyStats.block_date)
+ .all()
+ )
+
+ metrics_dic = {}
+ for metric in metrics:
+ metrics_dic.setdefault(metric.distribution_name, {})[metric.block_date] = metric
+ return metrics_dic
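
Note: the grouping above relies on dict.setdefault to build a two-level index in a single pass. The same idiom on plain tuples:

    rows = [
        ("dist_a", "2024-01-01", 1.0),
        ("dist_a", "2024-01-08", 2.0),
        ("dist_b", "2024-01-01", 3.0),
    ]

    grouped = {}
    for name, day, value in rows:
        # setdefault returns the existing inner dict, or inserts {} first
        grouped.setdefault(name, {})[day] = value

    assert grouped["dist_a"]["2024-01-08"] == 2.0
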
+
+
+def populate_data_for_date(res, row, date_index, metrics_dic):
+ """Populate data for a specific date index."""
+ target_metrics = metrics_dic.get(row.distribution_name, {}).get(row.block_date)
+ if target_metrics:
+ res[row.distribution_name]["data"][date_index]["avg"] = float(target_metrics.avg)
+ res[row.distribution_name]["data"][date_index]["stdev"] = float(target_metrics.stdev)
+ res[row.distribution_name]["data"][date_index]["actual_date"] = row.block_date.isoformat()
+ res[row.distribution_name]["data"][date_index]["data"].append({"value": float(row.value), "label": float(row.x)})
+
+
+def get_all_udf_dashboards_data():
+ today = date.today() - timedelta(days=1)  # despite the name, this is yesterday: the most recent complete day
+ week_ago = today - timedelta(days=7)
+ month_ago = today - timedelta(days=30)
+
+ # Fetch main data and metrics
+ result = (
+ db.session.query(AFDistributionDailyStats)
+ .filter(
+ AFDistributionDailyStats.x != 0.0,
+ or_(
+ AFDistributionDailyStats.block_date == today,
+ AFDistributionDailyStats.block_date == week_ago,
+ AFDistributionDailyStats.block_date == month_ago,
+ ),
+ )
+ .order_by(
+ AFDistributionDailyStats.distribution_name,
+ AFDistributionDailyStats.block_date,
+ AFDistributionDailyStats.x,
+ )
+ .all()
+ )
+ metrics_dic = fetch_and_group_metrics(today, week_ago, month_ago)
+
+ # Initialize result structure
+ all_distribution_names = get_all_udf_dashboards()
+ res = {
+ name: {
+ "name": name,
+ "chart_type": "",
+ "data": [
+ {"type": "as_of_today", "avg": "", "stdev": "", "actual_date": "", "data": []},
+ {"type": "as_of_a_week_ago", "avg": "", "stdev": "", "actual_date": "", "data": []},
+ {"type": "as_of_a_month_ago", "avg": "", "stdev": "", "actual_date": "", "data": []},
+ ],
+ }
+ for name in all_distribution_names
+ }
+
+ date_mapping = {today: 0, week_ago: 1, month_ago: 2}
+ for row in result:
+ if (
+ row.distribution_name in {"distribution_job_ens_holdings_udf", "distribution_job_ens_resolves_udf"}
+ and float(row.x) == 100.0
+ ):
+ continue
+ date_index = date_mapping.get(row.block_date)
+ if date_index is not None:
+ populate_data_for_date(res, row, date_index, metrics_dic)
+
+ # Handle missing data
+ for distribution_name, distribution_data in res.items():
+ for i, (date_ref, date_type) in enumerate(
+ [(today, "as_of_today"), (week_ago, "as_of_a_week_ago"), (month_ago, "as_of_a_month_ago")]
+ ):
+ if not distribution_data["data"][i]["data"]:
+ actual_date, data = get_best_match_data(distribution_name, date_ref)
+ avg, stdev = get_distribution_date_metrics(distribution_name, actual_date)
+ distribution_data["data"][i].update(
+ {
+ "avg": avg,
+ "stdev": stdev,
+ "actual_date": actual_date.isoformat(),
+ "data": data,
+ }
+ )
+
+ # Determine chart type
+ chart_type = (
+ "log"
+ if any(check_logarithmic_pattern(distribution_name, distribution_data["data"][i]["data"]) for i in range(3))
+ else "value"
+ )
+ distribution_data["chart_type"] = chart_type
+ if distribution_name in ("distribution_age_daily", "distribution_tx_daily", "distribution_deployed_daily"):
+ res[distribution_name]["data"] = res[distribution_name]["data"][0:1]
+
+ return res
+
+
+def get_best_match_data(distribution_name: str, target_date: date):
+ """
+ Find the nearest data for a given distribution and target_date.
+ """
+ closest_block_date = (
+ db.session.query(AFDistributionDailyStats.block_date)
+ .filter(AFDistributionDailyStats.distribution_name == distribution_name)
+ .order_by(
+ func.abs(
+ func.date_part("epoch", AFDistributionDailyStats.block_date) - func.date_part("epoch", target_date)
+ )
+ )
+ .limit(1)
+ .scalar()
+ )
+
+ if not closest_block_date:
+ return None, []  # keep the (date, data) shape the caller unpacks
+
+ result = (
+ db.session.query(AFDistributionDailyStats)
+ .filter(
+ and_(
+ AFDistributionDailyStats.block_date == closest_block_date,
+ AFDistributionDailyStats.distribution_name == distribution_name,
+ AFDistributionDailyStats.x != 0.0,
+ )
+ )
+ .order_by(AFDistributionDailyStats.x)
+ .all()
+ )
+
+ data = [{"value": float(record.value), "label": float(record.x)} for record in result]
+
+ return closest_block_date, data
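
Note: the nearest-date lookup pushes the distance computation into SQL by ordering on |epoch(block_date) - epoch(target)| and taking the first row; date_part("epoch", ...) is PostgreSQL-specific. The same expression in isolation (distribution name illustrative, session as above):

    from datetime import date

    from sqlalchemy import func

    from hemera.common.models import db
    from hemera_udf.address_index.models.distribution_daily_stats import AFDistributionDailyStats

    target = date(2024, 1, 8)
    distance = func.abs(
        func.date_part("epoch", AFDistributionDailyStats.block_date)
        - func.date_part("epoch", target)
    )
    nearest = (
        db.session.query(AFDistributionDailyStats.block_date)
        .filter(AFDistributionDailyStats.distribution_name == "distribution_job_ens_holdings_udf")
        .order_by(distance)
        .limit(1)
        .scalar()
    )
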
+
+
+def get_distribution_date_metrics(distribution_name: str, block_date: date):
+ row = (
+ db.session.query(AFMetricsDistributionDailyStats)
+ .filter(
+ AFMetricsDistributionDailyStats.distribution_name == distribution_name,
+ AFMetricsDistributionDailyStats.block_date == block_date,
+ )
+ .order_by(AFMetricsDistributionDailyStats.distribution_name, AFMetricsDistributionDailyStats.block_date)
+ .first()
+ )
+ if row:
+ return float(row.avg), float(row.stdev)
+ return "", ""
+
+
+def is_logarithmic(labels):
+ if len(labels) < 3:
+ return False
+ differences = []
+ for i in range(1, len(labels)):
+ if labels[i - 1] <= 0 or labels[i] <= 0:
+ return False
+ differences.append(math.log(labels[i]) - math.log(labels[i - 1]))
+
+ threshold = 1e-6
+ return all(abs(d - differences[0]) < threshold for d in differences)
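
Note: is_logarithmic flags label sequences whose consecutive log-differences are (near-)constant, i.e. geometric progressions, since log(b) - log(a) is constant exactly when b / a is. A few spot checks, assuming the function above is in scope:

    assert is_logarithmic([1, 10, 100, 1000]) is True   # constant ratio 10
    assert is_logarithmic([0.5, 1, 2, 4, 8]) is True    # constant ratio 2
    assert is_logarithmic([1, 2, 3, 4]) is False        # linear spacing
    assert is_logarithmic([-1, 1, 10]) is False         # non-positive label
    assert is_logarithmic([1, 10]) is False             # fewer than 3 points
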
+
+
+def check_logarithmic_pattern(distribution_name, data):
+ log_chart_type = {
+ "distribution_job_eigen_layer_udf",
+ "distribution_job_aave2_supply_udf",
+ "distribution_job_aave2_borrow_udf",
+ }
+ if distribution_name in log_chart_type:
+ return True
+ labels = [item["label"] for item in data]
+ return is_logarithmic(labels)
diff --git a/indexer/modules/custom/address_index/utils/score.py b/hemera_udf/address_index/utils/score.py
similarity index 100%
rename from indexer/modules/custom/address_index/utils/score.py
rename to hemera_udf/address_index/utils/score.py
diff --git a/hemera_udf/blue_chip/__init__.py b/hemera_udf/blue_chip/__init__.py
new file mode 100644
index 000000000..3ca9830a7
--- /dev/null
+++ b/hemera_udf/blue_chip/__init__.py
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.token import Token, UpdateToken
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.domains.token_transfer import ERC721TokenTransfer
+from hemera.indexer.domains.transaction import Transaction
+from hemera_udf.aci_features.domains import AllFeatureValueRecordBlueChipHolders
+from hemera_udf.blue_chip.domains import BlueChipHolder
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-address-index:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("BLUE_CHIP")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ Block,
+ Transaction,
+ ERC721TokenTransfer,
+ Token,
+ UpdateToken,
+ TokenBalance,
+ CurrentTokenBalance,
+ AllFeatureValueRecordBlueChipHolders,
+ BlueChipHolder,
+ },
+)
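
Note: the version gate in these package __init__ files parses twice on purpose: base_version strips pre-release/dev/local segments, so "1.0.0rc1" passes the same check as "1.0.0" (a direct >= comparison would reject it, since pre-releases sort before the release). A sketch with a hypothetical helper:

    import packaging.version

    def meets_minimum(installed: str, minimum: str = "1.0.0") -> bool:
        base = packaging.version.parse(packaging.version.parse(installed).base_version)
        return base >= packaging.version.parse(minimum)

    assert meets_minimum("1.0.0") is True
    assert meets_minimum("1.0.0rc1") is True    # would fail without base_version
    assert meets_minimum("1.2.3.dev1") is True
    assert meets_minimum("0.9.9") is False
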
diff --git a/indexer/modules/custom/blue_chip/constants.py b/hemera_udf/blue_chip/constants.py
similarity index 100%
rename from indexer/modules/custom/blue_chip/constants.py
rename to hemera_udf/blue_chip/constants.py
diff --git a/indexer/modules/custom/blue_chip/domain/feature_blue_chip.py b/hemera_udf/blue_chip/domains.py
similarity index 71%
rename from indexer/modules/custom/blue_chip/domain/feature_blue_chip.py
rename to hemera_udf/blue_chip/domains.py
index 61165cca4..9cb667d58 100644
--- a/indexer/modules/custom/blue_chip/domain/feature_blue_chip.py
+++ b/hemera_udf/blue_chip/domains.py
@@ -1,10 +1,10 @@
from dataclasses import dataclass
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class BlueChipHolder(FilterData):
+class BlueChipHolder(Domain):
wallet_address: str
hold_detail: dict
current_count: int
diff --git a/indexer/modules/custom/blue_chip/models/__init__.py b/hemera_udf/blue_chip/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/blue_chip/models/__init__.py
rename to hemera_udf/blue_chip/jobs/__init__.py
diff --git a/indexer/modules/custom/blue_chip/export_blue_chip_job.py b/hemera_udf/blue_chip/jobs/export_blue_chip_job.py
similarity index 88%
rename from indexer/modules/custom/blue_chip/export_blue_chip_job.py
rename to hemera_udf/blue_chip/jobs/export_blue_chip_job.py
index 4aebcb3c4..1424e0e51 100644
--- a/indexer/modules/custom/blue_chip/export_blue_chip_job.py
+++ b/hemera_udf/blue_chip/jobs/export_blue_chip_job.py
@@ -1,17 +1,17 @@
import logging
from collections import defaultdict
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.block_ts_mapper import BlockTsMapper
-from indexer.domain.token_balance import TokenBalance
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.all_features_value_record import AllFeatureValueRecordBlueChipHolders
-from indexer.modules.custom.blue_chip import constants
-from indexer.modules.custom.blue_chip.domain.feature_blue_chip import BlueChipHolder
-from indexer.modules.custom.blue_chip.models.feature_blue_chip_holders import FeatureBlueChipHolders
-from indexer.modules.custom.feature_type import FeatureType
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.block_ts_mapper import BlockTsMapper
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.aci_features.domains import AllFeatureValueRecordBlueChipHolders
+from hemera_udf.aci_features.feature_type import FeatureType
+from hemera_udf.blue_chip import constants
+from hemera_udf.blue_chip.domains import BlueChipHolder
+from hemera_udf.blue_chip.models.feature_blue_chip_holders import FeatureBlueChipHolders
logger = logging.getLogger(__name__)
FEATURE_ID = FeatureType.BLUE_CHIP_HOLDING.value
diff --git a/indexer/modules/custom/cyber_id/__init__.py b/hemera_udf/blue_chip/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/cyber_id/__init__.py
rename to hemera_udf/blue_chip/models/__init__.py
diff --git a/indexer/modules/custom/blue_chip/models/feature_blue_chip_holders.py b/hemera_udf/blue_chip/models/feature_blue_chip_holders.py
similarity index 85%
rename from indexer/modules/custom/blue_chip/models/feature_blue_chip_holders.py
rename to hemera_udf/blue_chip/models/feature_blue_chip_holders.py
index 768a3f396..84ab5ad2c 100644
--- a/indexer/modules/custom/blue_chip/models/feature_blue_chip_holders.py
+++ b/hemera_udf/blue_chip/models/feature_blue_chip_holders.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, JSONB, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.blue_chip.domain.feature_blue_chip import BlueChipHolder
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.blue_chip.domains import BlueChipHolder
class FeatureBlueChipHolders(HemeraModel):
diff --git a/indexer/modules/bridge/README.md b/hemera_udf/bridge/README.md
similarity index 100%
rename from indexer/modules/bridge/README.md
rename to hemera_udf/bridge/README.md
diff --git a/hemera_udf/bridge/__init__.py b/hemera_udf/bridge/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/bridge/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/cyber_id/domains/__init__.py b/hemera_udf/bridge/arbitrum/__init__.py
similarity index 100%
rename from indexer/modules/custom/cyber_id/domains/__init__.py
rename to hemera_udf/bridge/arbitrum/__init__.py
diff --git a/indexer/modules/bridge/arbitrum/arb_bridge_on_l1_job.py b/hemera_udf/bridge/arbitrum/arb_bridge_on_l1_job.py
similarity index 94%
rename from indexer/modules/bridge/arbitrum/arb_bridge_on_l1_job.py
rename to hemera_udf/bridge/arbitrum/arb_bridge_on_l1_job.py
index 3495d7fe3..13815cfb8 100644
--- a/indexer/modules/bridge/arbitrum/arb_bridge_on_l1_job.py
+++ b/hemera_udf/bridge/arbitrum/arb_bridge_on_l1_job.py
@@ -1,11 +1,12 @@
import logging
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.bridge.arbitrum.arb_parser import *
-from indexer.modules.bridge.arbitrum.arb_rlp import calculate_deposit_tx_id, calculate_submit_retryable_id
-from indexer.modules.bridge.domain.arbitrum import ArbitrumL1ToL2TransactionOnL1, ArbitrumL2ToL1TransactionOnL1
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.arbitrum.arb_parser import *
+from hemera_udf.bridge.arbitrum.arb_parser import parse_outbound_transfer_function
+from hemera_udf.bridge.arbitrum.arb_rlp import calculate_deposit_tx_id, calculate_submit_retryable_id
+from hemera_udf.bridge.domains.arbitrum import ArbitrumL1ToL2TransactionOnL1, ArbitrumL2ToL1TransactionOnL1
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/bridge/arbitrum/arb_bridge_on_l2_job.py b/hemera_udf/bridge/arbitrum/arb_bridge_on_l2_job.py
similarity index 89%
rename from indexer/modules/bridge/arbitrum/arb_bridge_on_l2_job.py
rename to hemera_udf/bridge/arbitrum/arb_bridge_on_l2_job.py
index ad56785c9..ea6bd0f76 100644
--- a/indexer/modules/bridge/arbitrum/arb_bridge_on_l2_job.py
+++ b/hemera_udf/bridge/arbitrum/arb_bridge_on_l2_job.py
@@ -1,8 +1,8 @@
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.bridge.arbitrum.arb_parser import *
-from indexer.modules.bridge.domain.arbitrum import ArbitrumL1ToL2TransactionOnL2, ArbitrumL2ToL1TransactionOnL2
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.arbitrum.arb_parser import *
+from hemera_udf.bridge.domains.arbitrum import ArbitrumL1ToL2TransactionOnL2, ArbitrumL2ToL1TransactionOnL2
class ArbitrumBridgeOnL2Job(FilterTransactionDataJob):
diff --git a/indexer/modules/bridge/arbitrum/arb_conf.py b/hemera_udf/bridge/arbitrum/arb_conf.py
similarity index 100%
rename from indexer/modules/bridge/arbitrum/arb_conf.py
rename to hemera_udf/bridge/arbitrum/arb_conf.py
diff --git a/indexer/modules/bridge/arbitrum/arb_network.py b/hemera_udf/bridge/arbitrum/arb_network.py
similarity index 100%
rename from indexer/modules/bridge/arbitrum/arb_network.py
rename to hemera_udf/bridge/arbitrum/arb_network.py
diff --git a/indexer/modules/bridge/arbitrum/arb_parser.py b/hemera_udf/bridge/arbitrum/arb_parser.py
similarity index 99%
rename from indexer/modules/bridge/arbitrum/arb_parser.py
rename to hemera_udf/bridge/arbitrum/arb_parser.py
index 2464cd9bc..11ddf6191 100644
--- a/indexer/modules/bridge/arbitrum/arb_parser.py
+++ b/hemera_udf/bridge/arbitrum/arb_parser.py
@@ -13,9 +13,10 @@
from web3._utils.contracts import decode_transaction_data
from web3.types import ABIEvent, ABIFunction
-from common.utils.abi_code_utils import decode_log
-from indexer.modules.bridge.arbitrum.arb_network import Network
-from indexer.modules.bridge.domain.arbitrum import (
+from hemera.common.utils.abi_code_utils import decode_log
+from hemera.indexer.utils.abi import event_log_abi_to_topic, function_abi_to_4byte_selector_str
+from hemera_udf.bridge.arbitrum.arb_network import Network
+from hemera_udf.bridge.domains.arbitrum import (
ArbitrumStateBatchConfirmed,
ArbitrumStateBatchCreated,
ArbitrumTransactionBatch,
@@ -24,7 +25,6 @@
TicketCreatedData,
TransactionToken,
)
-from indexer.utils.abi import event_log_abi_to_topic, function_abi_to_4byte_selector_str
MESSAGE_DELIVERED_EVENT = cast(
ABIEvent,
diff --git a/indexer/modules/bridge/arbitrum/arb_rlp.py b/hemera_udf/bridge/arbitrum/arb_rlp.py
similarity index 96%
rename from indexer/modules/bridge/arbitrum/arb_rlp.py
rename to hemera_udf/bridge/arbitrum/arb_rlp.py
index d6b3c05f2..a1c5ddbe8 100644
--- a/indexer/modules/bridge/arbitrum/arb_rlp.py
+++ b/hemera_udf/bridge/arbitrum/arb_rlp.py
@@ -8,7 +8,7 @@
import rlp
from web3 import Web3
-from common.utils.format_utils import hex_str_to_bytes
+from hemera.common.utils.format_utils import hex_str_to_bytes
def convert_int_bytes(value, length):
diff --git a/indexer/modules/custom/cyber_id/models/__init__.py b/hemera_udf/bridge/bedrock/__init__.py
similarity index 100%
rename from indexer/modules/custom/cyber_id/models/__init__.py
rename to hemera_udf/bridge/bedrock/__init__.py
diff --git a/indexer/modules/bridge/bedrock/bedrock_bridge_on_l1_job.py b/hemera_udf/bridge/bedrock/bedrock_bridge_on_l1_job.py
similarity index 95%
rename from indexer/modules/bridge/bedrock/bedrock_bridge_on_l1_job.py
rename to hemera_udf/bridge/bedrock/bedrock_bridge_on_l1_job.py
index 6095283d7..1d1e924ab 100644
--- a/indexer/modules/bridge/bedrock/bedrock_bridge_on_l1_job.py
+++ b/hemera_udf/bridge/bedrock/bedrock_bridge_on_l1_job.py
@@ -1,15 +1,15 @@
import logging
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.bridge.bedrock.parser.bedrock_bridge_parser import (
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.bedrock.parser.bedrock_bridge_parser import (
BEDROCK_EVENT_ABI_SIGNATURE_MAPPING,
parse_propose_l2_output,
parse_relayed_message,
parse_transaction_deposited_event,
)
-from indexer.modules.bridge.domain.op_bedrock import *
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.domains.op_bedrock import *
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/bridge/bedrock/bedrock_bridge_on_l2_job.py b/hemera_udf/bridge/bedrock/bedrock_bridge_on_l2_job.py
similarity index 90%
rename from indexer/modules/bridge/bedrock/bedrock_bridge_on_l2_job.py
rename to hemera_udf/bridge/bedrock/bedrock_bridge_on_l2_job.py
index a7204b09a..57b489994 100644
--- a/indexer/modules/bridge/bedrock/bedrock_bridge_on_l2_job.py
+++ b/hemera_udf/bridge/bedrock/bedrock_bridge_on_l2_job.py
@@ -1,15 +1,15 @@
import logging
-from common.utils.exception_control import FastShutdownError
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.bridge.bedrock.parser.bedrock_bridge_parser import (
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.bedrock.parser.bedrock_bridge_parser import (
BEDROCK_EVENT_ABI_SIGNATURE_MAPPING,
parse_message_passed_event,
parse_relayed_message,
)
-from indexer.modules.bridge.domain.op_bedrock import *
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.domains.op_bedrock import *
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/day_mining/__init__.py b/hemera_udf/bridge/bedrock/parser/__init__.py
similarity index 100%
rename from indexer/modules/custom/day_mining/__init__.py
rename to hemera_udf/bridge/bedrock/parser/__init__.py
diff --git a/indexer/modules/bridge/bedrock/parser/bedrock_bridge_parser.py b/hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py
similarity index 96%
rename from indexer/modules/bridge/bedrock/parser/bedrock_bridge_parser.py
rename to hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py
index 67a83dc67..3b980aa9c 100644
--- a/indexer/modules/bridge/bedrock/parser/bedrock_bridge_parser.py
+++ b/hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py
@@ -6,24 +6,24 @@
from web3.auto import w3
from web3.types import ABIEvent, ABIFunction
-from common.utils.abi_code_utils import decode_log
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.transaction import Transaction
-from indexer.modules.bridge.bedrock.parser.function_parser import (
+from hemera.common.utils.abi_code_utils import decode_log
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.utils.abi import event_log_abi_to_topic
+from hemera_udf.bridge.bedrock.parser.function_parser import (
BedrockBridgeParser,
BedRockFunctionCallType,
BridgeRemoteFunctionCallInfo,
)
-from indexer.modules.bridge.bedrock.parser.function_parser.finalize_bridge_erc20 import FINALIZE_BRIDGE_ERC20_DECODER
-from indexer.modules.bridge.bedrock.parser.function_parser.finalize_bridge_erc721 import FINALIZE_BRIDGE_ERC721_DECODER
-from indexer.modules.bridge.bedrock.parser.function_parser.finalize_bridge_eth import FINALIZE_BRIDGE_ETH_DECODER
-from indexer.modules.bridge.bridge_utils import (
+from hemera_udf.bridge.bedrock.parser.function_parser.finalize_bridge_erc20 import FINALIZE_BRIDGE_ERC20_DECODER
+from hemera_udf.bridge.bedrock.parser.function_parser.finalize_bridge_erc721 import FINALIZE_BRIDGE_ERC721_DECODER
+from hemera_udf.bridge.bedrock.parser.function_parser.finalize_bridge_eth import FINALIZE_BRIDGE_ETH_DECODER
+from hemera_udf.bridge.bridge_utils import (
deposit_event_to_op_bedrock_transaction,
get_version_and_index_from_nonce,
unmarshal_deposit_version0,
unmarshal_deposit_version1,
)
-from indexer.utils.abi import event_log_abi_to_topic
bedrockBridgeParser = BedrockBridgeParser(
[
diff --git a/indexer/modules/bridge/bedrock/parser/function_parser/__init__.py b/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py
similarity index 92%
rename from indexer/modules/bridge/bedrock/parser/function_parser/__init__.py
rename to hemera_udf/bridge/bedrock/parser/function_parser/__init__.py
index 860eb6010..c6b1a70f4 100644
--- a/indexer/modules/bridge/bedrock/parser/function_parser/__init__.py
+++ b/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py
@@ -4,8 +4,8 @@
from web3.types import ABIFunction
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.utils.abi import function_abi_to_4byte_selector_str
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str
class BedRockFunctionCallType(Enum):
diff --git a/indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py
similarity index 92%
rename from indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py
rename to hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py
index b8fe4c33b..799d9559f 100644
--- a/indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py
+++ b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py
@@ -4,13 +4,13 @@
from web3._utils.contracts import decode_transaction_data
from web3.types import ABIFunction
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.modules.bridge.bedrock.parser.function_parser import (
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str
+from hemera_udf.bridge.bedrock.parser.function_parser import (
BedRockFunctionCallType,
BridgeRemoteFunctionCallInfo,
RemoteFunctionCallDecoder,
)
-from indexer.utils.abi import function_abi_to_4byte_selector_str
FINALIZE_BRIDGE_ERC20 = cast(
ABIFunction,
diff --git a/indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py
similarity index 92%
rename from indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py
rename to hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py
index 4c73550b1..afa2c8417 100644
--- a/indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py
+++ b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py
@@ -4,13 +4,13 @@
from web3._utils.contracts import decode_transaction_data
from web3.types import ABIFunction
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.modules.bridge.bedrock.parser.function_parser import (
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str
+from hemera_udf.bridge.bedrock.parser.function_parser import (
BedRockFunctionCallType,
BridgeRemoteFunctionCallInfo,
RemoteFunctionCallDecoder,
)
-from indexer.utils.abi import function_abi_to_4byte_selector_str
FINALIZE_BRIDGE_ERC721 = cast(
ABIFunction,
diff --git a/indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py
similarity index 89%
rename from indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py
rename to hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py
index e080137ae..c340831d6 100644
--- a/indexer/modules/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py
+++ b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py
@@ -4,14 +4,14 @@
from web3._utils.contracts import decode_transaction_data
from web3.types import ABIFunction
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.modules.bridge.bedrock.parser.function_parser import (
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str
+from hemera_udf.bridge.bedrock.parser.function_parser import (
BedRockFunctionCallType,
BridgeRemoteFunctionCallInfo,
RemoteFunctionCallDecoder,
)
-from indexer.utils.abi import function_abi_to_4byte_selector_str
FINALIZE_BRIDGE_ETH = cast(
ABIFunction,
diff --git a/indexer/modules/bridge/bridge_utils.py b/hemera_udf/bridge/bridge_utils.py
similarity index 97%
rename from indexer/modules/bridge/bridge_utils.py
rename to hemera_udf/bridge/bridge_utils.py
index 480baff67..5fa521306 100644
--- a/indexer/modules/bridge/bridge_utils.py
+++ b/hemera_udf/bridge/bridge_utils.py
@@ -5,8 +5,8 @@
from rlp.sedes import big_endian_int, binary, boolean
from web3 import Web3 as w3
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.domain.log import Log
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.indexer.domains.log import Log
class OpBedrockDepositTx(Serializable):
diff --git a/indexer/modules/custom/day_mining/domain/__init__.py b/hemera_udf/bridge/domains/__init__.py
similarity index 100%
rename from indexer/modules/custom/day_mining/domain/__init__.py
rename to hemera_udf/bridge/domains/__init__.py
diff --git a/indexer/modules/bridge/domain/arbitrum.py b/hemera_udf/bridge/domains/arbitrum.py
similarity index 82%
rename from indexer/modules/bridge/domain/arbitrum.py
rename to hemera_udf/bridge/domains/arbitrum.py
index c2ee50eab..ad0c5614c 100644
--- a/indexer/modules/bridge/domain/arbitrum.py
+++ b/hemera_udf/bridge/domains/arbitrum.py
@@ -1,11 +1,11 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class ArbitrumL1ToL2TransactionOnL1(FilterData):
+class ArbitrumL1ToL2TransactionOnL1(Domain):
msg_hash: str
index: int
l1_block_number: int
@@ -24,7 +24,7 @@ class ArbitrumL1ToL2TransactionOnL1(FilterData):
@dataclass
-class ArbitrumL2ToL1TransactionOnL1(FilterData):
+class ArbitrumL2ToL1TransactionOnL1(Domain):
msg_hash: str
l1_transaction_hash: str
l1_block_number: int
@@ -35,7 +35,7 @@ class ArbitrumL2ToL1TransactionOnL1(FilterData):
@dataclass
-class TicketCreatedData(FilterData):
+class TicketCreatedData(Domain):
msg_hash: str
transaction_hash: str
block_number: int
@@ -46,7 +46,7 @@ class TicketCreatedData(FilterData):
@dataclass
-class BridgeCallTriggeredData(FilterData):
+class BridgeCallTriggeredData(Domain):
msg_hash: str
l1_transaction_hash: str
l1_block_number: int
@@ -61,14 +61,14 @@ class BridgeCallTriggeredData(FilterData):
@dataclass
-class TransactionToken(FilterData):
+class TransactionToken(Domain):
transaction_hash: str
l1Token: str
amount: int
@dataclass
-class ArbitrumTransactionBatch(FilterData):
+class ArbitrumTransactionBatch(Domain):
batch_index: int
l1_block_number: int
l1_block_timestamp: int
@@ -80,7 +80,7 @@ class ArbitrumTransactionBatch(FilterData):
@dataclass
-class ArbitrumStateBatchConfirmed(FilterData):
+class ArbitrumStateBatchConfirmed(Domain):
node_num: int
block_hash: str
send_root: str
@@ -94,7 +94,7 @@ class ArbitrumStateBatchConfirmed(FilterData):
@dataclass
-class ArbitrumStateBatchCreated(FilterData):
+class ArbitrumStateBatchCreated(Domain):
node_num: int
create_l1_block_number: int
create_l1_block_timestamp: int
@@ -105,13 +105,13 @@ class ArbitrumStateBatchCreated(FilterData):
@dataclass
-class BridgeToken(FilterData):
+class BridgeToken(Domain):
l1_token_address: str
l2_token_address: str
@dataclass
-class ArbitrumL2ToL1TransactionOnL2(FilterData):
+class ArbitrumL2ToL1TransactionOnL2(Domain):
msg_hash: str
index: Optional[int]
l2_block_number: int
@@ -129,7 +129,7 @@ class ArbitrumL2ToL1TransactionOnL2(FilterData):
@dataclass
-class ArbitrumL1ToL2TransactionOnL2(FilterData):
+class ArbitrumL1ToL2TransactionOnL2(Domain):
msg_hash: str
l2_block_number: int
l2_block_timestamp: int
diff --git a/indexer/modules/bridge/domain/morph.py b/hemera_udf/bridge/domains/morph.py
similarity index 97%
rename from indexer/modules/bridge/domain/morph.py
rename to hemera_udf/bridge/domains/morph.py
index 860b0133d..4e0e72cad 100644
--- a/indexer/modules/bridge/domain/morph.py
+++ b/hemera_udf/bridge/domains/morph.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Any, Dict, Optional
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/modules/bridge/domain/op_bedrock.py b/hemera_udf/bridge/domains/op_bedrock.py
similarity index 84%
rename from indexer/modules/bridge/domain/op_bedrock.py
rename to hemera_udf/bridge/domains/op_bedrock.py
index 93339bcc5..584f00f65 100644
--- a/indexer/modules/bridge/domain/op_bedrock.py
+++ b/hemera_udf/bridge/domains/op_bedrock.py
@@ -1,11 +1,11 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class OpL1ToL2DepositedTransaction(FilterData):
+class OpL1ToL2DepositedTransaction(Domain):
msg_hash: str
version: Optional[int]
index: Optional[int]
@@ -28,7 +28,7 @@ class OpL1ToL2DepositedTransaction(FilterData):
@dataclass
-class OpL1ToL2DepositedTransactionOnL2(FilterData):
+class OpL1ToL2DepositedTransactionOnL2(Domain):
msg_hash: str
l2_block_number: int
l2_block_timestamp: int
@@ -39,7 +39,7 @@ class OpL1ToL2DepositedTransactionOnL2(FilterData):
@dataclass
-class OpL2ToL1WithdrawnTransactionFinalized(FilterData):
+class OpL2ToL1WithdrawnTransactionFinalized(Domain):
msg_hash: str
l1_block_number: int
l1_block_timestamp: int
@@ -50,7 +50,7 @@ class OpL2ToL1WithdrawnTransactionFinalized(FilterData):
@dataclass
-class OpL2ToL1WithdrawnTransactionOnL2(FilterData):
+class OpL2ToL1WithdrawnTransactionOnL2(Domain):
msg_hash: str
version: Optional[int]
index: Optional[int]
@@ -73,7 +73,7 @@ class OpL2ToL1WithdrawnTransactionOnL2(FilterData):
@dataclass
-class OpL2ToL1WithdrawnTransactionProven(FilterData):
+class OpL2ToL1WithdrawnTransactionProven(Domain):
msg_hash: str
l1_proven_block_number: int
l1_proven_block_timestamp: int
@@ -84,7 +84,7 @@ class OpL2ToL1WithdrawnTransactionProven(FilterData):
@dataclass
-class OpStateBatch(FilterData):
+class OpStateBatch(Domain):
batch_index: int
l1_block_number: int
l1_block_timestamp: int
diff --git a/indexer/modules/bridge/items.py b/hemera_udf/bridge/items.py
similarity index 98%
rename from indexer/modules/bridge/items.py
rename to hemera_udf/bridge/items.py
index 864a20d7c..31cb9f6a8 100644
--- a/indexer/modules/bridge/items.py
+++ b/hemera_udf/bridge/items.py
@@ -2,8 +2,8 @@
from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.orm import declarative_base
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.modules.bridge.arbitrum.arb_parser import (
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera_udf.bridge.arbitrum.arb_parser import (
ArbitrumStateBatchConfirmed,
ArbitrumStateBatchCreated,
ArbitrumTransactionBatch,
diff --git a/indexer/modules/custom/day_mining/models/__init__.py b/hemera_udf/bridge/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/day_mining/models/__init__.py
rename to hemera_udf/bridge/models/__init__.py
diff --git a/common/models/bridge.py b/hemera_udf/bridge/models/bridge.py
similarity index 98%
rename from common/models/bridge.py
rename to hemera_udf/bridge/models/bridge.py
index 24170ea6e..06961df24 100644
--- a/common/models/bridge.py
+++ b/hemera_udf/bridge/models/bridge.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import ARRAY, BOOLEAN, BYTEA, INTEGER, JSON, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.bridge.domain.arbitrum import (
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.bridge.domains.arbitrum import (
ArbitrumL1ToL2TransactionOnL1,
ArbitrumL1ToL2TransactionOnL2,
ArbitrumL2ToL1TransactionOnL1,
@@ -12,13 +12,13 @@
ArbitrumTransactionBatch,
BridgeToken,
)
-from indexer.modules.bridge.domain.morph import (
+from hemera_udf.bridge.domains.morph import (
MorphDepositedTransactionOnL1,
MorphDepositedTransactionOnL2,
MorphWithdrawalTransactionOnL1,
MorphWithdrawalTransactionOnL2,
)
-from indexer.modules.bridge.domain.op_bedrock import (
+from hemera_udf.bridge.domains.op_bedrock import (
OpL1ToL2DepositedTransaction,
OpL1ToL2DepositedTransactionOnL2,
OpL2ToL1WithdrawnTransactionFinalized,
diff --git a/indexer/modules/custom/demo_job/__init__.py b/hemera_udf/bridge/morphl2/__init__.py
similarity index 100%
rename from indexer/modules/custom/demo_job/__init__.py
rename to hemera_udf/bridge/morphl2/__init__.py
diff --git a/indexer/modules/bridge/morphl2/abi/event.py b/hemera_udf/bridge/morphl2/abi/event.py
similarity index 96%
rename from indexer/modules/bridge/morphl2/abi/event.py
rename to hemera_udf/bridge/morphl2/abi/event.py
index 9c8086690..56ca2cce6 100644
--- a/indexer/modules/bridge/morphl2/abi/event.py
+++ b/hemera_udf/bridge/morphl2/abi/event.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event
+from hemera.common.utils.abi_code_utils import Event
SentMessageEvent = Event(
{
diff --git a/indexer/modules/bridge/morphl2/abi/function.py b/hemera_udf/bridge/morphl2/abi/function.py
similarity index 98%
rename from indexer/modules/bridge/morphl2/abi/function.py
rename to hemera_udf/bridge/morphl2/abi/function.py
index 911d14117..3783e8f8f 100644
--- a/indexer/modules/bridge/morphl2/abi/function.py
+++ b/hemera_udf/bridge/morphl2/abi/function.py
@@ -3,9 +3,9 @@
from web3.types import ABIFunction
-from common.utils.abi_code_utils import Function, FunctionCollection
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.modules.bridge.morphl2.parser.deposited_transaction import DepositedTransaction
+from hemera.common.utils.abi_code_utils import Function, FunctionCollection
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera_udf.bridge.morphl2.parser.deposited_transaction import DepositedTransaction
class MorphFunction(Function):
diff --git a/indexer/modules/bridge/morphl2/morph_bridge_on_l1_job.py b/hemera_udf/bridge/morphl2/morph_bridge_on_l1_job.py
similarity index 88%
rename from indexer/modules/bridge/morphl2/morph_bridge_on_l1_job.py
rename to hemera_udf/bridge/morphl2/morph_bridge_on_l1_job.py
index 582c5fab2..c532c53fa 100644
--- a/indexer/modules/bridge/morphl2/morph_bridge_on_l1_job.py
+++ b/hemera_udf/bridge/morphl2/morph_bridge_on_l1_job.py
@@ -1,11 +1,11 @@
import logging
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.bridge.domain.morph import MorphDepositedTransactionOnL1, MorphWithdrawalTransactionOnL1
-from indexer.modules.bridge.morphl2.abi.event import QueueTransactionEvent, RelayedMessageEvent
-from indexer.modules.bridge.morphl2.parser.parser import parse_relayed_message_event, parse_transaction_deposited_event
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.domains.morph import MorphDepositedTransactionOnL1, MorphWithdrawalTransactionOnL1
+from hemera_udf.bridge.morphl2.abi.event import QueueTransactionEvent, RelayedMessageEvent
+from hemera_udf.bridge.morphl2.parser.parser import parse_relayed_message_event, parse_transaction_deposited_event
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/bridge/morphl2/morph_bridge_on_l2_job.py b/hemera_udf/bridge/morphl2/morph_bridge_on_l2_job.py
similarity index 87%
rename from indexer/modules/bridge/morphl2/morph_bridge_on_l2_job.py
rename to hemera_udf/bridge/morphl2/morph_bridge_on_l2_job.py
index 8c322bade..3b5b0ccd1 100644
--- a/indexer/modules/bridge/morphl2/morph_bridge_on_l2_job.py
+++ b/hemera_udf/bridge/morphl2/morph_bridge_on_l2_job.py
@@ -1,11 +1,11 @@
import logging
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.bridge.domain.morph import MorphDepositedTransactionOnL2, MorphWithdrawalTransactionOnL2
-from indexer.modules.bridge.morphl2.abi.event import RelayedMessageEvent, SentMessageEvent
-from indexer.modules.bridge.morphl2.parser.parser import parse_relayed_message_event, parse_sent_message_event
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.bridge.domains.morph import MorphDepositedTransactionOnL2, MorphWithdrawalTransactionOnL2
+from hemera_udf.bridge.morphl2.abi.event import RelayedMessageEvent, SentMessageEvent
+from hemera_udf.bridge.morphl2.parser.parser import parse_relayed_message_event, parse_sent_message_event
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/bridge/morphl2/parser/deposited_transaction.py b/hemera_udf/bridge/morphl2/parser/deposited_transaction.py
similarity index 100%
rename from indexer/modules/bridge/morphl2/parser/deposited_transaction.py
rename to hemera_udf/bridge/morphl2/parser/deposited_transaction.py
diff --git a/indexer/modules/bridge/morphl2/parser/parser.py b/hemera_udf/bridge/morphl2/parser/parser.py
similarity index 94%
rename from indexer/modules/bridge/morphl2/parser/parser.py
rename to hemera_udf/bridge/morphl2/parser/parser.py
index 8c9561bc2..029884865 100644
--- a/indexer/modules/bridge/morphl2/parser/parser.py
+++ b/hemera_udf/bridge/morphl2/parser/parser.py
@@ -2,12 +2,12 @@
from web3 import Web3
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.transaction import Transaction
-from indexer.modules.bridge.bedrock.parser.bedrock_bridge_parser import RelayedMessageTransaction
-from indexer.modules.bridge.bridge_utils import get_version_and_index_from_nonce
-from indexer.modules.bridge.morphl2.abi.event import QueueTransactionEvent, RelayedMessageEvent, SentMessageEvent
-from indexer.modules.bridge.morphl2.abi.function import (
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.transaction import Transaction
+from hemera_udf.bridge.bedrock.parser.bedrock_bridge_parser import RelayedMessageTransaction
+from hemera_udf.bridge.bridge_utils import get_version_and_index_from_nonce
+from hemera_udf.bridge.morphl2.abi.event import QueueTransactionEvent, RelayedMessageEvent, SentMessageEvent
+from hemera_udf.bridge.morphl2.abi.function import (
MorphFunctionCollection,
finalizeBatchDepositERC721Function,
finalizeBatchDepositERC1155Function,
@@ -23,7 +23,7 @@
finalizeWithdrawETHFunction,
relayMessageFunction,
)
-from indexer.modules.bridge.morphl2.parser.deposited_transaction import DepositedTransaction
+from hemera_udf.bridge.morphl2.parser.deposited_transaction import DepositedTransaction
def parse_relayed_message_event(transaction: Transaction, contract_address) -> List[RelayedMessageTransaction]:
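The import block above pulls in `get_version_and_index_from_nonce` from `hemera_udf.bridge.bridge_utils`, whose body is not shown in this diff. Since this parser reuses the Bedrock `RelayedMessageTransaction`, the nonce presumably follows the OP-Stack versioned-nonce convention: a 16-bit version packed into the top bits and a 240-bit index below it. A minimal sketch under that assumption (not necessarily hemera's exact implementation):

```python
# Hypothetical sketch of get_version_and_index_from_nonce, assuming the
# OP-Stack versioned-nonce layout: | 16-bit version | 240-bit index |.
def get_version_and_index_from_nonce_sketch(nonce: int) -> tuple:
    version = nonce >> 240              # top 16 bits
    index = nonce & ((1 << 240) - 1)    # lower 240 bits
    return version, index
```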
diff --git a/hemera_udf/cyber_id/__init__.py b/hemera_udf/cyber_id/__init__.py
new file mode 100644
index 000000000..3ef9ca3c9
--- /dev/null
+++ b/hemera_udf/cyber_id/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-address-index:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/demo_job/domain/__init__.py b/hemera_udf/cyber_id/abi/__init__.py
similarity index 100%
rename from indexer/modules/custom/demo_job/domain/__init__.py
rename to hemera_udf/cyber_id/abi/__init__.py
diff --git a/indexer/modules/custom/cyber_id/abi/event.py b/hemera_udf/cyber_id/abi/event.py
similarity index 91%
rename from indexer/modules/custom/cyber_id/abi/event.py
rename to hemera_udf/cyber_id/abi/event.py
index a0c93ec94..4a6829081 100644
--- a/indexer/modules/custom/cyber_id/abi/event.py
+++ b/hemera_udf/cyber_id/abi/event.py
@@ -1,10 +1,10 @@
from web3.types import ABIEvent
-from common.utils.abi_code_utils import Event
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.log import Log
-from indexer.modules.custom.cyber_id.domains.cyber_domain import CyberAddressChangedD, CyberIDRegisterD
-from indexer.modules.custom.cyber_id.utils import get_node
+from hemera.common.utils.abi_code_utils import Event
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.log import Log
+from hemera_udf.cyber_id.domains import CyberAddressChangedD, CyberIDRegisterD
+from hemera_udf.cyber_id.utils import get_node
class CyberEvent(Event):
diff --git a/indexer/modules/custom/cyber_id/abi/function.py b/hemera_udf/cyber_id/abi/function.py
similarity index 91%
rename from indexer/modules/custom/cyber_id/abi/function.py
rename to hemera_udf/cyber_id/abi/function.py
index d7f03b066..88304467f 100644
--- a/indexer/modules/custom/cyber_id/abi/function.py
+++ b/hemera_udf/cyber_id/abi/function.py
@@ -1,9 +1,9 @@
from web3.types import ABIFunction
-from common.utils.abi_code_utils import Function
-from indexer.domain.transaction import Transaction
-from indexer.modules.custom.cyber_id.domains.cyber_domain import CyberAddressD
-from indexer.modules.custom.cyber_id.utils import get_reverse_node
+from hemera.common.utils.abi_code_utils import Function
+from hemera.indexer.domains.transaction import Transaction
+from hemera_udf.cyber_id.domains import CyberAddressD
+from hemera_udf.cyber_id.utils import get_reverse_node
class CyberFunction(Function):
diff --git a/indexer/modules/custom/cyber_id/domains/cyber_domain.py b/hemera_udf/cyber_id/domains.py
similarity index 69%
rename from indexer/modules/custom/cyber_id/domains/cyber_domain.py
rename to hemera_udf/cyber_id/domains.py
index 1d7a08aae..ceb2163db 100644
--- a/indexer/modules/custom/cyber_id/domains/cyber_domain.py
+++ b/hemera_udf/cyber_id/domains.py
@@ -1,11 +1,11 @@
from dataclasses import dataclass
from datetime import datetime
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class CyberAddressD(FilterData):
+class CyberAddressD(Domain):
address: str
reverse_node: str
name: str
@@ -13,7 +13,7 @@ class CyberAddressD(FilterData):
@dataclass
-class CyberIDRegisterD(FilterData):
+class CyberIDRegisterD(Domain):
label: str
token_id: int
node: str
@@ -23,7 +23,7 @@ class CyberIDRegisterD(FilterData):
@dataclass
-class CyberAddressChangedD(FilterData):
+class CyberAddressChangedD(Domain):
node: str
address: str
block_number: int
diff --git a/indexer/modules/custom/deposit_to_l2/__init__.py b/hemera_udf/cyber_id/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/deposit_to_l2/__init__.py
rename to hemera_udf/cyber_id/jobs/__init__.py
diff --git a/indexer/modules/custom/cyber_id/export_cyber_id_job.py b/hemera_udf/cyber_id/jobs/export_cyber_id_job.py
similarity index 83%
rename from indexer/modules/custom/cyber_id/export_cyber_id_job.py
rename to hemera_udf/cyber_id/jobs/export_cyber_id_job.py
index c75118ea6..60d3167a8 100644
--- a/indexer/modules/custom/cyber_id/export_cyber_id_job.py
+++ b/hemera_udf/cyber_id/jobs/export_cyber_id_job.py
@@ -2,14 +2,14 @@
from itertools import groupby
from typing import List
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.cyber_id.abi.event import AddressChangedEvent, CyberEvent, NameChangedEvent, RegisterEvent
-from indexer.modules.custom.cyber_id.abi.function import CyberFunction, SetNameForAddrFunction, SetNameFunction
-from indexer.modules.custom.cyber_id.domains.cyber_domain import CyberAddressChangedD, CyberAddressD, CyberIDRegisterD
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.cyber_id.abi.event import AddressChangedEvent, CyberEvent, NameChangedEvent, RegisterEvent
+from hemera_udf.cyber_id.abi.function import CyberFunction, SetNameForAddrFunction, SetNameFunction
+from hemera_udf.cyber_id.domains import CyberAddressChangedD, CyberAddressD, CyberIDRegisterD
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/deposit_to_l2/domain/__init__.py b/hemera_udf/cyber_id/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/deposit_to_l2/domain/__init__.py
rename to hemera_udf/cyber_id/models/__init__.py
diff --git a/indexer/modules/custom/cyber_id/models/cyber_models.py b/hemera_udf/cyber_id/models/cyber_models.py
similarity index 87%
rename from indexer/modules/custom/cyber_id/models/cyber_models.py
rename to hemera_udf/cyber_id/models/cyber_models.py
index a5294f30e..a017a081d 100644
--- a/indexer/modules/custom/cyber_id/models/cyber_models.py
+++ b/hemera_udf/cyber_id/models/cyber_models.py
@@ -1,9 +1,9 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
-from indexer.modules.custom.cyber_id.domains.cyber_domain import CyberAddressChangedD, CyberAddressD, CyberIDRegisterD
-from indexer.modules.custom.hemera_ens.models.af_ens_node_current import ens_general_converter
+from hemera.common.models import HemeraModel
+from hemera_udf.cyber_id.domains import CyberAddressChangedD, CyberAddressD, CyberIDRegisterD
+from hemera_udf.hemera_ens.models.af_ens_node_current import ens_general_converter
class CyberAddress(HemeraModel):
diff --git a/indexer/modules/custom/cyber_id/sql/cyber_id_table_migration.sql b/hemera_udf/cyber_id/sql/cyber_id_table_migration.sql
similarity index 100%
rename from indexer/modules/custom/cyber_id/sql/cyber_id_table_migration.sql
rename to hemera_udf/cyber_id/sql/cyber_id_table_migration.sql
diff --git a/indexer/modules/custom/cyber_id/utils.py b/hemera_udf/cyber_id/utils.py
similarity index 100%
rename from indexer/modules/custom/cyber_id/utils.py
rename to hemera_udf/cyber_id/utils.py
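`utils.py` moves verbatim (100% similarity), so `get_node` / `get_reverse_node` are not shown here. CyberID names are ENS-style, and node derivation for such names is normally the EIP-137 namehash; a reference sketch of that algorithm (skipping UTS-46 normalization), not necessarily hemera's exact implementation:

```python
from eth_utils import keccak


def namehash(name: str) -> bytes:
    """EIP-137 namehash: fold keccak over the labels from right to left."""
    node = b"\x00" * 32
    if name:
        for label in reversed(name.split(".")):
            node = keccak(node + keccak(label.encode()))
    return node
```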
diff --git a/hemera_udf/day_mining/__init__.py b/hemera_udf/day_mining/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/day_mining/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/day_mining/domain/current_traits_activeness.py b/hemera_udf/day_mining/domains.py
similarity index 83%
rename from indexer/modules/custom/day_mining/domain/current_traits_activeness.py
rename to hemera_udf/day_mining/domains.py
index 0143f2207..6e042d23c 100644
--- a/indexer/modules/custom/day_mining/domain/current_traits_activeness.py
+++ b/hemera_udf/day_mining/domains.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/modules/custom/deposit_to_l2/models/__init__.py b/hemera_udf/day_mining/job/__init__.py
similarity index 100%
rename from indexer/modules/custom/deposit_to_l2/models/__init__.py
rename to hemera_udf/day_mining/job/__init__.py
diff --git a/indexer/modules/custom/day_mining/export_activeness.py b/hemera_udf/day_mining/job/export_activeness.py
similarity index 89%
rename from indexer/modules/custom/day_mining/export_activeness.py
rename to hemera_udf/day_mining/job/export_activeness.py
index ecb2eb910..26d51ad41 100644
--- a/indexer/modules/custom/day_mining/export_activeness.py
+++ b/hemera_udf/day_mining/job/export_activeness.py
@@ -1,15 +1,15 @@
from collections import defaultdict
-from common.models.contracts import Contracts
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.contract import Contract
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import ExtensionJob
-from indexer.modules.custom.all_features_value_record import AllFeatureValueRecordTraitsActiveness
-from indexer.modules.custom.day_mining.domain.current_traits_activeness import CurrentTraitsActiveness
-from indexer.modules.custom.day_mining.models.current_traits_activeness import CurrentTraitsActivenessModel
-from indexer.modules.custom.feature_type import FeatureType
+from hemera.common.models.contracts import Contracts
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.contract import Contract
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera_udf.aci_features.domains import AllFeatureValueRecordTraitsActiveness
+from hemera_udf.aci_features.feature_type import FeatureType
+from hemera_udf.day_mining.domains import CurrentTraitsActiveness
+from hemera_udf.day_mining.models.current_traits_activeness import CurrentTraitsActivenessModel
"""
record:
diff --git a/indexer/modules/custom/eigen_layer/domains/__init__.py b/hemera_udf/day_mining/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/eigen_layer/domains/__init__.py
rename to hemera_udf/day_mining/models/__init__.py
diff --git a/indexer/modules/custom/day_mining/models/current_traits_activeness.py b/hemera_udf/day_mining/models/current_traits_activeness.py
similarity index 84%
rename from indexer/modules/custom/day_mining/models/current_traits_activeness.py
rename to hemera_udf/day_mining/models/current_traits_activeness.py
index 3dadc72fa..93aef6757 100644
--- a/indexer/modules/custom/day_mining/models/current_traits_activeness.py
+++ b/hemera_udf/day_mining/models/current_traits_activeness.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, JSONB, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.day_mining.domain.current_traits_activeness import CurrentTraitsActiveness
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.day_mining.domains import CurrentTraitsActiveness
class CurrentTraitsActivenessModel(HemeraModel):
diff --git a/hemera_udf/demo_job/__init__.py b/hemera_udf/demo_job/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/demo_job/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/demo_job/demo_job.py b/hemera_udf/demo_job/demo_job.py
similarity index 90%
rename from indexer/modules/custom/demo_job/demo_job.py
rename to hemera_udf/demo_job/demo_job.py
index 46e269724..456d3abc4 100644
--- a/indexer/modules/custom/demo_job/demo_job.py
+++ b/hemera_udf/demo_job/demo_job.py
@@ -3,19 +3,19 @@
from typing import List
-from common.utils.web3_utils import ZERO_ADDRESS
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
# Dependency dataclass
-from indexer.domain.log import Log
-from indexer.domain.token_transfer import TokenTransfer
-from indexer.jobs.base_job import Collector, FilterTransactionDataJob
-
-# Custom dataclass
-from indexer.modules.custom.demo_job.domain.erc721_token_mint import ERC721TokenMint
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token_transfer import TokenTransfer
+from hemera.indexer.jobs.base_job import Collector, FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
# Utility
-from indexer.utils.abi_setting import ERC721_TRANSFER_EVENT
+from hemera.indexer.utils.abi_setting import ERC721_TRANSFER_EVENT
+
+# Custom dataclass
+from hemera_udf.demo_job.domains import ERC721TokenMint
def _filter_erc721_mint_event(logs: List[Log]) -> List[TokenTransfer]:
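The filter this demo job applies is simple to state: an ERC-721 mint is a `Transfer` whose `from` side is the zero address. A self-contained sketch of that rule, using a stand-in dataclass rather than hemera's actual `TokenTransfer`:

```python
from dataclasses import dataclass
from typing import List

ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"


@dataclass
class TokenTransferSketch:          # stand-in for hemera's TokenTransfer
    from_address: str
    to_address: str
    token_address: str
    token_id: int


def filter_erc721_mints(transfers: List[TokenTransferSketch]) -> List[TokenTransferSketch]:
    # A mint is a transfer out of the zero address.
    return [t for t in transfers if t.from_address == ZERO_ADDRESS]
```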
diff --git a/indexer/modules/custom/demo_job/domain/erc721_token_mint.py b/hemera_udf/demo_job/domains.py
similarity index 77%
rename from indexer/modules/custom/demo_job/domain/erc721_token_mint.py
rename to hemera_udf/demo_job/domains.py
index 1d061ac7b..0fb4af040 100644
--- a/indexer/modules/custom/demo_job/domain/erc721_token_mint.py
+++ b/hemera_udf/demo_job/domains.py
@@ -3,11 +3,11 @@
from dataclasses import dataclass
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class ERC721TokenMint(FilterData):
+class ERC721TokenMint(Domain):
address: str
token_address: str
token_id: int
diff --git a/indexer/modules/custom/eigen_layer/models/__init__.py b/hemera_udf/demo_job/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/eigen_layer/models/__init__.py
rename to hemera_udf/demo_job/models/__init__.py
diff --git a/indexer/modules/custom/demo_job/models/erc721_token_mint.py b/hemera_udf/demo_job/models/erc721_token_mint.py
similarity index 95%
rename from indexer/modules/custom/demo_job/models/erc721_token_mint.py
rename to hemera_udf/demo_job/models/erc721_token_mint.py
index 06c15e924..586c96f90 100644
--- a/indexer/modules/custom/demo_job/models/erc721_token_mint.py
+++ b/hemera_udf/demo_job/models/erc721_token_mint.py
@@ -4,7 +4,7 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
+from hemera.common.models import HemeraModel, general_converter
class ERC721TokenMint(HemeraModel):
diff --git a/hemera_udf/deposit_to_l2/__init__.py b/hemera_udf/deposit_to_l2/__init__.py
new file mode 100644
index 000000000..5598f3cc3
--- /dev/null
+++ b/hemera_udf/deposit_to_l2/__init__.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.deposit_to_l2.domains import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("DEPOSIT_TO_L2")
+DynamicEntityTypeRegistry.register_output_types(value, {TokenDepositTransaction, AddressTokenDeposit})
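These two calls are the whole plug-in contract: a UDF package registers a named entity type at import time and declares which `Domain` types it emits. A minimal sketch of the registry shape those calls imply — an illustration, not hemera's actual implementation — assuming each dynamic type is assigned a fresh power-of-two flag so types can be OR-combined like the built-in ones:

```python
from typing import Dict, Set, Type


class DynamicEntityTypeRegistrySketch:
    _next_bit: int = 1 << 32                 # assumption: dynamic flags start above the built-ins
    _names: Dict[str, int] = {}
    _output_types: Dict[int, Set[Type]] = {}

    @classmethod
    def register(cls, name: str) -> int:
        if name not in cls._names:
            cls._names[name] = cls._next_bit
            cls._next_bit <<= 1
        return cls._names[name]

    @classmethod
    def register_output_types(cls, value: int, types: Set[Type]) -> None:
        cls._output_types.setdefault(value, set()).update(types)
```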
diff --git a/indexer/modules/custom/deposit_to_l2/config.ini b/hemera_udf/deposit_to_l2/config.ini
similarity index 100%
rename from indexer/modules/custom/deposit_to_l2/config.ini
rename to hemera_udf/deposit_to_l2/config.ini
diff --git a/indexer/modules/custom/deposit_to_l2/deposit_parser.py b/hemera_udf/deposit_to_l2/deposit_parser.py
similarity index 97%
rename from indexer/modules/custom/deposit_to_l2/deposit_parser.py
rename to hemera_udf/deposit_to_l2/deposit_parser.py
index 94ac16cc1..6351c5500 100644
--- a/indexer/modules/custom/deposit_to_l2/deposit_parser.py
+++ b/hemera_udf/deposit_to_l2/deposit_parser.py
@@ -5,9 +5,9 @@
from web3._utils.contracts import decode_transaction_data
from web3.types import ABIEvent, ABIFunction
-from indexer.domain.transaction import Transaction
-from indexer.modules.custom.deposit_to_l2.domain.token_deposit_transaction import TokenDepositTransaction
-from indexer.utils.abi import event_log_abi_to_topic, function_abi_to_4byte_selector_str
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.utils.abi import event_log_abi_to_topic, function_abi_to_4byte_selector_str
+from hemera_udf.deposit_to_l2.domains import TokenDepositTransaction
ETH_ADDRESS = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"
USDC_ADDRESS = "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"
diff --git a/indexer/modules/custom/deposit_to_l2/deposit_to_l2_job.py b/hemera_udf/deposit_to_l2/deposit_to_l2_job.py
similarity index 76%
rename from indexer/modules/custom/deposit_to_l2/deposit_to_l2_job.py
rename to hemera_udf/deposit_to_l2/deposit_to_l2_job.py
index 772a8c08f..cfc458865 100644
--- a/indexer/modules/custom/deposit_to_l2/deposit_to_l2_job.py
+++ b/hemera_udf/deposit_to_l2/deposit_to_l2_job.py
@@ -4,21 +4,18 @@
from typing import List, cast
from eth_utils import to_normalized_address
-from sqlalchemy import and_
from web3.types import ABIFunction
-from common.utils.cache_utils import BlockToLiveDict, TimeToLiveDict
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.deposit_to_l2.deposit_parser import parse_deposit_transaction_function, token_parse_mapping
-from indexer.modules.custom.deposit_to_l2.domain.address_token_deposit import AddressTokenDeposit
-from indexer.modules.custom.deposit_to_l2.domain.token_deposit_transaction import TokenDepositTransaction
-from indexer.modules.custom.deposit_to_l2.models.af_token_deposits_current import AFTokenDepositsCurrent
-from indexer.specification.specification import ToAddressSpecification, TransactionFilterByTransactionInfo
-from indexer.utils.abi import function_abi_to_4byte_selector_str
-from indexer.utils.collection_utils import distinct_collections_by_group
+from hemera.common.utils.cache_utils import BlockToLiveDict, TimeToLiveDict
+from hemera.common.utils.db_utils import build_domains_by_sql
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import ToAddressSpecification, TransactionFilterByTransactionInfo
+from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera_udf.deposit_to_l2.deposit_parser import parse_deposit_transaction_function, token_parse_mapping
+from hemera_udf.deposit_to_l2.domains import AddressTokenDeposit, TokenDepositTransaction
class DepositToL2Job(FilterTransactionDataJob):
@@ -149,36 +146,27 @@ def _process(self, **kwargs):
def check_history_deposit_from_db(
self, wallet_address: str, chain_id: int, token_address: str
) -> AddressTokenDeposit:
- session = self._service.get_service_session()
- try:
- history_deposit = (
- session.query(AFTokenDepositsCurrent)
- .filter(
- and_(
- AFTokenDepositsCurrent.wallet_address == hex_str_to_bytes(wallet_address),
- AFTokenDepositsCurrent.chain_id == chain_id,
- AFTokenDepositsCurrent.token_address == hex_str_to_bytes(token_address),
- )
- )
- .first()
- )
- finally:
- session.close()
-
- deposit = (
- AddressTokenDeposit(
- wallet_address=bytes_to_hex_str(history_deposit.wallet_address),
- chain_id=history_deposit.chain_id,
- contract_address=bytes_to_hex_str(history_deposit.contract_address),
- token_address=bytes_to_hex_str(history_deposit.token_address),
- value=int(history_deposit.value),
- block_number=history_deposit.block_number,
- block_timestamp=int(round(history_deposit.block_timestamp.timestamp())),
- )
- if history_deposit
- else None
+
+ deposit = build_domains_by_sql(
+ service=self._service,
+ domain=AddressTokenDeposit,
+ sql="""
+ SELECT * FROM af_token_deposits_current
+ WHERE
+ wallet_address = '{}'
+ and chain_id = {}
+ and token_address = '{}'
+ limit 1
+ """.format(
+ "\\" + wallet_address[1:], chain_id, "\\" + token_address[1:]
+ ),
)
+ if len(deposit) == 0:
+ deposit = None
+ else:
+ deposit = deposit[0]
+
return deposit
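The rewritten lookup swaps the ORM query for `build_domains_by_sql`, which hydrates `AddressTokenDeposit` rows straight from SQL. The only non-obvious part is the `"\\" + wallet_address[1:]` interpolation: it rewrites a `0x…` hex string into Postgres `\x…` bytea-literal form. A quick check:

```python
wallet_address = "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
literal = "\\" + wallet_address[1:]   # drop the leading "0", prepend a backslash
print(literal)  # \xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2 -- valid '\x…' bytea syntax
```

Note the values are spliced in with `str.format` rather than bound parameters; that is tolerable here only because the address and chain id come from decoded chain data rather than user input.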
diff --git a/hemera_udf/deposit_to_l2/domains.py b/hemera_udf/deposit_to_l2/domains.py
new file mode 100644
index 000000000..a6a007bc6
--- /dev/null
+++ b/hemera_udf/deposit_to_l2/domains.py
@@ -0,0 +1,26 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class AddressTokenDeposit(Domain):
+ wallet_address: str
+ chain_id: int
+ contract_address: str
+ token_address: str
+ value: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class TokenDepositTransaction(Domain):
+ transaction_hash: str
+ wallet_address: str
+ chain_id: int
+ contract_address: str
+ token_address: str
+ value: int
+ block_number: int
+ block_timestamp: int
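Both dataclasses are plain `Domain` rows: one `TokenDepositTransaction` per deposit transaction, while `AddressTokenDeposit` backs the `af_token_deposits_current` table as the running per-wallet balance. Constructing one with illustrative values (the USDC address is the constant from `deposit_parser.py` above; the rest are placeholders):

```python
deposit = TokenDepositTransaction(
    transaction_hash="0x" + "ab" * 32,                           # placeholder hash
    wallet_address="0x" + "11" * 20,                             # placeholder wallet
    chain_id=1,
    contract_address="0x" + "22" * 20,                           # placeholder bridge contract
    token_address="0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",  # USDC, per deposit_parser.py
    value=1_000_000,                                             # 1 USDC at 6 decimals
    block_number=20_000_000,
    block_timestamp=1_721_000_000,
)
```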
diff --git a/indexer/modules/custom/deposit_to_l2/endpoint/__init__.py b/hemera_udf/deposit_to_l2/endpoint/__init__.py
similarity index 100%
rename from indexer/modules/custom/deposit_to_l2/endpoint/__init__.py
rename to hemera_udf/deposit_to_l2/endpoint/__init__.py
diff --git a/indexer/modules/custom/deposit_to_l2/endpoint/routes.py b/hemera_udf/deposit_to_l2/endpoint/routes.py
similarity index 89%
rename from indexer/modules/custom/deposit_to_l2/endpoint/routes.py
rename to hemera_udf/deposit_to_l2/endpoint/routes.py
index ff941bed2..785319e42 100644
--- a/indexer/modules/custom/deposit_to_l2/endpoint/routes.py
+++ b/hemera_udf/deposit_to_l2/endpoint/routes.py
@@ -3,24 +3,24 @@
import flask
from flask_restx import Resource
-from api.app.address.features import register_feature
-from api.app.cache import cache
-from api.app.db_service.af_token_deposit import (
+from hemera.api.app.address.features import register_feature
+from hemera.api.app.cache import cache
+from hemera.api.app.db_service.af_token_deposit import (
get_deposit_assets_list,
get_deposit_chain_list,
get_transactions_by_condition,
get_transactions_cnt_by_condition,
get_transactions_cnt_by_wallet,
)
-from api.app.db_service.blocks import get_block_by_hash
-from api.app.db_service.tokens import get_token_price_map_by_symbol_list
-from api.app.utils.parse_utils import parse_deposit_assets, parse_deposit_transactions
-from common.utils.config import get_config
-from common.utils.exception_control import APIError
-from common.utils.format_utils import hex_str_to_bytes, row_to_dict
-from common.utils.web3_utils import SUPPORT_CHAINS, chain_id_name_mapping
-from indexer.modules.custom.deposit_to_l2.endpoint import token_deposit_namespace
-from indexer.modules.custom.deposit_to_l2.models.af_token_deposits__transactions import AFTokenDepositsTransactions
+from hemera.api.app.db_service.blocks import get_block_by_hash
+from hemera.api.app.db_service.tokens import get_token_price_map_by_symbol_list
+from hemera.api.app.utils.parse_utils import parse_deposit_assets, parse_deposit_transactions
+from hemera.common.utils.config import get_config
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import hex_str_to_bytes, row_to_dict
+from hemera.common.utils.web3_utils import SUPPORT_CHAINS, chain_id_name_mapping
+from hemera_udf.deposit_to_l2.endpoint import token_deposit_namespace
+from hemera_udf.deposit_to_l2.models.af_token_deposits__transactions import AFTokenDepositsTransactions
PAGE_SIZE = 10
MAX_TRANSACTION = 500000
diff --git a/indexer/modules/custom/etherfi/__init__.py b/hemera_udf/deposit_to_l2/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/etherfi/__init__.py
rename to hemera_udf/deposit_to_l2/models/__init__.py
diff --git a/indexer/modules/custom/deposit_to_l2/models/af_token_deposits__transactions.py b/hemera_udf/deposit_to_l2/models/af_token_deposits__transactions.py
similarity index 90%
rename from indexer/modules/custom/deposit_to_l2/models/af_token_deposits__transactions.py
rename to hemera_udf/deposit_to_l2/models/af_token_deposits__transactions.py
index 0820c7e90..1706cebd0 100644
--- a/indexer/modules/custom/deposit_to_l2/models/af_token_deposits__transactions.py
+++ b/hemera_udf/deposit_to_l2/models/af_token_deposits__transactions.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.deposit_to_l2.domain.token_deposit_transaction import TokenDepositTransaction
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.deposit_to_l2.domains import TokenDepositTransaction
class AFTokenDepositsTransactions(HemeraModel):
diff --git a/indexer/modules/custom/deposit_to_l2/models/af_token_deposits_current.py b/hemera_udf/deposit_to_l2/models/af_token_deposits_current.py
similarity index 87%
rename from indexer/modules/custom/deposit_to_l2/models/af_token_deposits_current.py
rename to hemera_udf/deposit_to_l2/models/af_token_deposits_current.py
index ec5e75c50..9a953ebb4 100644
--- a/indexer/modules/custom/deposit_to_l2/models/af_token_deposits_current.py
+++ b/hemera_udf/deposit_to_l2/models/af_token_deposits_current.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.deposit_to_l2.domain.address_token_deposit import AddressTokenDeposit
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.deposit_to_l2.domains import AddressTokenDeposit
class AFTokenDepositsCurrent(HemeraModel):
diff --git a/hemera_udf/eigen_layer/__init__.py b/hemera_udf/eigen_layer/__init__.py
new file mode 100644
index 000000000..dfa7d60b4
--- /dev/null
+++ b/hemera_udf/eigen_layer/__init__.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/9/23 17:12
+# @Author will
+# @File __init__.py
+# @Brief
+
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.eigen_layer.domains import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
+
+value = DynamicEntityTypeRegistry.register("EIGEN_LAYER")
+DynamicEntityTypeRegistry.register_output_types(value, {EigenLayerAction, EigenLayerAddressCurrent})
diff --git a/indexer/modules/custom/eigen_layer/abi.py b/hemera_udf/eigen_layer/abi.py
similarity index 98%
rename from indexer/modules/custom/eigen_layer/abi.py
rename to hemera_udf/eigen_layer/abi.py
index 75fd69d3b..f44fa25f2 100644
--- a/indexer/modules/custom/eigen_layer/abi.py
+++ b/hemera_udf/eigen_layer/abi.py
@@ -5,7 +5,7 @@
# @File eigen_layer_abi.py
# @Brief
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
DEPOSIT_EVENT = Event(
{
diff --git a/indexer/modules/custom/eigen_layer/domains/eigen_layer_domain.py b/hemera_udf/eigen_layer/domains.py
similarity index 96%
rename from indexer/modules/custom/eigen_layer/domains/eigen_layer_domain.py
rename to hemera_udf/eigen_layer/domains.py
index e7f861608..3d605f118 100644
--- a/indexer/modules/custom/eigen_layer/domains/eigen_layer_domain.py
+++ b/hemera_udf/eigen_layer/domains.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/modules/custom/eigen_layer/endpoint/__init__.py b/hemera_udf/eigen_layer/endpoint/__init__.py
similarity index 100%
rename from indexer/modules/custom/eigen_layer/endpoint/__init__.py
rename to hemera_udf/eigen_layer/endpoint/__init__.py
diff --git a/indexer/modules/custom/eigen_layer/endpoint/routes.py b/hemera_udf/eigen_layer/endpoint/routes.py
similarity index 89%
rename from indexer/modules/custom/eigen_layer/endpoint/routes.py
rename to hemera_udf/eigen_layer/endpoint/routes.py
index d7666510a..0a737a035 100644
--- a/indexer/modules/custom/eigen_layer/endpoint/routes.py
+++ b/hemera_udf/eigen_layer/endpoint/routes.py
@@ -1,12 +1,11 @@
from operator import and_
-from select import select
-from typing import Any, Dict, Optional, Union
+from typing import Optional, Union
-from api.app.address.features import register_feature
-from common.models import db
-from common.models.tokens import Tokens
-from common.utils.format_utils import bytes_to_hex_str, format_value_for_json, hex_str_to_bytes
-from indexer.modules.custom.eigen_layer.models.af_eigen_layer_address_current import AfEigenLayerAddressCurrent
+from hemera.api.app.address.features import register_feature
+from hemera.common.models import db
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.format_utils import bytes_to_hex_str, format_value_for_json, hex_str_to_bytes
+from hemera_udf.eigen_layer.models.af_eigen_layer_address_current import AfEigenLayerAddressCurrent
@register_feature("eigen_layer", "value")
diff --git a/indexer/modules/custom/etherfi/abi/__init__.py b/hemera_udf/eigen_layer/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/etherfi/abi/__init__.py
rename to hemera_udf/eigen_layer/jobs/__init__.py
diff --git a/indexer/modules/custom/eigen_layer/eigen_layer_job.py b/hemera_udf/eigen_layer/jobs/eigen_layer_job.py
similarity index 95%
rename from indexer/modules/custom/eigen_layer/eigen_layer_job.py
rename to hemera_udf/eigen_layer/jobs/eigen_layer_job.py
index 0e90402e7..b6cd8e957 100644
--- a/indexer/modules/custom/eigen_layer/eigen_layer_job.py
+++ b/hemera_udf/eigen_layer/jobs/eigen_layer_job.py
@@ -8,25 +8,25 @@
from collections import defaultdict
from typing import Any, Dict, List
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.eigen_layer.abi import (
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.eigen_layer.abi import (
DEPOSIT_EVENT,
SHARE_WITHDRAW_QUEUED,
WITHDRAWAL_COMPLETED_EVENT,
WITHDRAWAL_QUEUED_BATCH_EVENT,
WITHDRAWAL_QUEUED_EVENT,
)
-from indexer.modules.custom.eigen_layer.domains.eigen_layer_domain import (
+from hemera_udf.eigen_layer.domains import (
EigenLayerAction,
EigenLayerAddressCurrent,
eigen_layer_address_current_factory,
)
-from indexer.modules.custom.eigen_layer.models.af_eigen_layer_address_current import AfEigenLayerAddressCurrent
-from indexer.modules.custom.eigen_layer.models.af_eigen_layer_records import AfEigenLayerRecords
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.eigen_layer.models.af_eigen_layer_address_current import AfEigenLayerAddressCurrent
+from hemera_udf.eigen_layer.models.af_eigen_layer_records import AfEigenLayerRecords
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/etherfi/domains/__init__.py b/hemera_udf/eigen_layer/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/etherfi/domains/__init__.py
rename to hemera_udf/eigen_layer/models/__init__.py
diff --git a/indexer/modules/custom/eigen_layer/models/af_eigen_layer_address_current.py b/hemera_udf/eigen_layer/models/af_eigen_layer_address_current.py
similarity index 85%
rename from indexer/modules/custom/eigen_layer/models/af_eigen_layer_address_current.py
rename to hemera_udf/eigen_layer/models/af_eigen_layer_address_current.py
index 0e454f744..352f48712 100644
--- a/indexer/modules/custom/eigen_layer/models/af_eigen_layer_address_current.py
+++ b/hemera_udf/eigen_layer/models/af_eigen_layer_address_current.py
@@ -7,10 +7,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.eigen_layer.domains.eigen_layer_domain import (
- EigenLayerAddressCurrent as EigenLayerAddressCurrentD,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.eigen_layer.domains import EigenLayerAddressCurrent as EigenLayerAddressCurrentD
class AfEigenLayerAddressCurrent(HemeraModel):
diff --git a/indexer/modules/custom/eigen_layer/models/af_eigen_layer_records.py b/hemera_udf/eigen_layer/models/af_eigen_layer_records.py
similarity index 89%
rename from indexer/modules/custom/eigen_layer/models/af_eigen_layer_records.py
rename to hemera_udf/eigen_layer/models/af_eigen_layer_records.py
index 145583d9e..b9e4be860 100644
--- a/indexer/modules/custom/eigen_layer/models/af_eigen_layer_records.py
+++ b/hemera_udf/eigen_layer/models/af_eigen_layer_records.py
@@ -7,8 +7,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.eigen_layer.domains.eigen_layer_domain import EigenLayerAction as EigenLayerActionD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.eigen_layer.domains import EigenLayerAction as EigenLayerActionD
class AfEigenLayerRecords(HemeraModel):
diff --git a/indexer/modules/custom/eigen_layer/sql/eigen_layer_table_migration.sql b/hemera_udf/eigen_layer/sql/eigen_layer_table_migration.sql
similarity index 100%
rename from indexer/modules/custom/eigen_layer/sql/eigen_layer_table_migration.sql
rename to hemera_udf/eigen_layer/sql/eigen_layer_table_migration.sql
diff --git a/hemera_udf/etherfi/__init__.py b/hemera_udf/etherfi/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/etherfi/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/etherfi/models/__init__.py b/hemera_udf/etherfi/abi/__init__.py
similarity index 100%
rename from indexer/modules/custom/etherfi/models/__init__.py
rename to hemera_udf/etherfi/abi/__init__.py
diff --git a/indexer/modules/custom/etherfi/abi/event.py b/hemera_udf/etherfi/abi/event.py
similarity index 96%
rename from indexer/modules/custom/etherfi/abi/event.py
rename to hemera_udf/etherfi/abi/event.py
index 2c2b8ea1a..f935bf2dc 100644
--- a/indexer/modules/custom/etherfi/abi/event.py
+++ b/hemera_udf/etherfi/abi/event.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event
+from hemera.common.utils.abi_code_utils import Event
transfer_share_event = Event(
{
diff --git a/indexer/modules/custom/etherfi/abi/functions.py b/hemera_udf/etherfi/abi/functions.py
similarity index 94%
rename from indexer/modules/custom/etherfi/abi/functions.py
rename to hemera_udf/etherfi/abi/functions.py
index 45ab5bfbf..0bada91b5 100644
--- a/indexer/modules/custom/etherfi/abi/functions.py
+++ b/hemera_udf/etherfi/abi/functions.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Function
+from hemera.common.utils.abi_code_utils import Function
get_shares_func = Function(
{
diff --git a/indexer/modules/custom/etherfi/domains/eeth.py b/hemera_udf/etherfi/domains.py
similarity index 73%
rename from indexer/modules/custom/etherfi/domains/eeth.py
rename to hemera_udf/etherfi/domains.py
index 5fcc0d3a7..35944f6d4 100644
--- a/indexer/modules/custom/etherfi/domains/eeth.py
+++ b/hemera_udf/etherfi/domains.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
@@ -25,3 +25,10 @@ class EtherFiPositionValuesD(Domain):
total_share: int
total_value_out_lp: int
total_value_in_lp: int
+
+
+@dataclass
+class EtherFiLrtExchangeRateD(Domain):
+ exchange_rate: int
+ token_address: str
+ block_number: int
diff --git a/indexer/modules/custom/init_capital/__init__.py b/hemera_udf/etherfi/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/init_capital/__init__.py
rename to hemera_udf/etherfi/jobs/__init__.py
diff --git a/indexer/modules/custom/etherfi/export_etherfi_share_job.py b/hemera_udf/etherfi/jobs/export_etherfi_share_job.py
similarity index 90%
rename from indexer/modules/custom/etherfi/export_etherfi_share_job.py
rename to hemera_udf/etherfi/jobs/export_etherfi_share_job.py
index 3d3926b7f..f83074893 100644
--- a/indexer/modules/custom/etherfi/export_etherfi_share_job.py
+++ b/hemera_udf/etherfi/jobs/export_etherfi_share_job.py
@@ -2,24 +2,22 @@
from itertools import groupby
from typing import List
-from web3 import Web3
-
-from common.utils.web3_utils import ZERO_ADDRESS, event_topic_to_address
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import FilterTransactionDataJob
-from indexer.modules.custom.etherfi.abi.event import *
-from indexer.modules.custom.etherfi.abi.functions import *
-from indexer.modules.custom.etherfi.domains.eeth import (
+from hemera.common.utils.web3_utils import ZERO_ADDRESS, event_topic_to_address
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.etherfi.abi.event import *
+from hemera_udf.etherfi.abi.functions import *
+from hemera_udf.etherfi.domains import (
+ EtherFiLrtExchangeRateD,
EtherFiPositionValuesD,
EtherFiShareBalanceCurrentD,
EtherFiShareBalanceD,
)
-from indexer.modules.custom.etherfi.domains.lrts import EtherFiLrtExchangeRateD
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/init_capital/domains/__init__.py b/hemera_udf/etherfi/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/init_capital/domains/__init__.py
rename to hemera_udf/etherfi/models/__init__.py
diff --git a/indexer/modules/custom/etherfi/models/eeth.py b/hemera_udf/etherfi/models/eeth.py
similarity index 92%
rename from indexer/modules/custom/etherfi/models/eeth.py
rename to hemera_udf/etherfi/models/eeth.py
index 2fd846bbb..f1907f7b0 100644
--- a/indexer/modules/custom/etherfi/models/eeth.py
+++ b/hemera_udf/etherfi/models/eeth.py
@@ -1,12 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.etherfi.domains.eeth import (
- EtherFiPositionValuesD,
- EtherFiShareBalanceCurrentD,
- EtherFiShareBalanceD,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.etherfi.domains import EtherFiPositionValuesD, EtherFiShareBalanceCurrentD, EtherFiShareBalanceD
class EtherFiShareBalances(HemeraModel):
diff --git a/indexer/modules/custom/etherfi/models/lrts.py b/hemera_udf/etherfi/models/lrts.py
similarity index 87%
rename from indexer/modules/custom/etherfi/models/lrts.py
rename to hemera_udf/etherfi/models/lrts.py
index b0818f1ba..f2d7e8840 100644
--- a/indexer/modules/custom/etherfi/models/lrts.py
+++ b/hemera_udf/etherfi/models/lrts.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.etherfi.domains.lrts import EtherFiLrtExchangeRateD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.etherfi.domains import EtherFiLrtExchangeRateD
class EtherFiLrtExchangeRate(HemeraModel):
diff --git a/indexer/modules/custom/etherfi/sql/ether_fi_sql_migration.sql b/hemera_udf/etherfi/sql/ether_fi_sql_migration.sql
similarity index 100%
rename from indexer/modules/custom/etherfi/sql/ether_fi_sql_migration.sql
rename to hemera_udf/etherfi/sql/ether_fi_sql_migration.sql
diff --git a/hemera_udf/hemera_ens/__init__.py b/hemera_udf/hemera_ens/__init__.py
new file mode 100644
index 000000000..480908c43
--- /dev/null
+++ b/hemera_udf/hemera_ens/__init__.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.hemera_ens.ens_domain import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("ENS")
+DynamicEntityTypeRegistry.register_output_types(
+ value, {ENSMiddleD, ENSRegisterD, ENSNameRenewD, ENSAddressChangeD, ENSAddressD}
+)
diff --git a/indexer/modules/custom/hemera_ens/abi/0x00000000000c2e074ec69a0dfb2997ba6c7d2e1e.json b/hemera_udf/hemera_ens/abi/0x00000000000c2e074ec69a0dfb2997ba6c7d2e1e.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x00000000000c2e074ec69a0dfb2997ba6c7d2e1e.json
rename to hemera_udf/hemera_ens/abi/0x00000000000c2e074ec69a0dfb2997ba6c7d2e1e.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x00000000008794027c69c26d2a048dbec09de67c.json b/hemera_udf/hemera_ens/abi/0x00000000008794027c69c26d2a048dbec09de67c.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x00000000008794027c69c26d2a048dbec09de67c.json
rename to hemera_udf/hemera_ens/abi/0x00000000008794027c69c26d2a048dbec09de67c.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x084b1c3c81545d370f3634392de611caabff8148.json b/hemera_udf/hemera_ens/abi/0x084b1c3c81545d370f3634392de611caabff8148.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x084b1c3c81545d370f3634392de611caabff8148.json
rename to hemera_udf/hemera_ens/abi/0x084b1c3c81545d370f3634392de611caabff8148.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x0904dac3347ea47d208f3fd67402d039a3b99859.json b/hemera_udf/hemera_ens/abi/0x0904dac3347ea47d208f3fd67402d039a3b99859.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x0904dac3347ea47d208f3fd67402d039a3b99859.json
rename to hemera_udf/hemera_ens/abi/0x0904dac3347ea47d208f3fd67402d039a3b99859.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x1d6552e8f46fd509f3918a174fe62c34b42564ae.json b/hemera_udf/hemera_ens/abi/0x1d6552e8f46fd509f3918a174fe62c34b42564ae.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x1d6552e8f46fd509f3918a174fe62c34b42564ae.json
rename to hemera_udf/hemera_ens/abi/0x1d6552e8f46fd509f3918a174fe62c34b42564ae.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x1da022710df5002339274aadee8d58218e9d6ab5.json b/hemera_udf/hemera_ens/abi/0x1da022710df5002339274aadee8d58218e9d6ab5.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x1da022710df5002339274aadee8d58218e9d6ab5.json
rename to hemera_udf/hemera_ens/abi/0x1da022710df5002339274aadee8d58218e9d6ab5.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x226159d592e2b063810a10ebf6dcbada94ed68b8.json b/hemera_udf/hemera_ens/abi/0x226159d592e2b063810a10ebf6dcbada94ed68b8.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x226159d592e2b063810a10ebf6dcbada94ed68b8.json
rename to hemera_udf/hemera_ens/abi/0x226159d592e2b063810a10ebf6dcbada94ed68b8.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x231b0ee14048e9dccd1d247744d114a4eb5e8e63.json b/hemera_udf/hemera_ens/abi/0x231b0ee14048e9dccd1d247744d114a4eb5e8e63.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x231b0ee14048e9dccd1d247744d114a4eb5e8e63.json
rename to hemera_udf/hemera_ens/abi/0x231b0ee14048e9dccd1d247744d114a4eb5e8e63.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x253553366da8546fc250f225fe3d25d0c782303b.json b/hemera_udf/hemera_ens/abi/0x253553366da8546fc250f225fe3d25d0c782303b.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x253553366da8546fc250f225fe3d25d0c782303b.json
rename to hemera_udf/hemera_ens/abi/0x253553366da8546fc250f225fe3d25d0c782303b.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x27c9b34eb43523447d3e1bcf26f009d814522687.json b/hemera_udf/hemera_ens/abi/0x27c9b34eb43523447d3e1bcf26f009d814522687.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x27c9b34eb43523447d3e1bcf26f009d814522687.json
rename to hemera_udf/hemera_ens/abi/0x27c9b34eb43523447d3e1bcf26f009d814522687.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x283af0b28c62c092c9727f1ee09c02ca627eb7f5.json b/hemera_udf/hemera_ens/abi/0x283af0b28c62c092c9727f1ee09c02ca627eb7f5.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x283af0b28c62c092c9727f1ee09c02ca627eb7f5.json
rename to hemera_udf/hemera_ens/abi/0x283af0b28c62c092c9727f1ee09c02ca627eb7f5.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x314159265dd8dbb310642f98f50c066173c1259b.json b/hemera_udf/hemera_ens/abi/0x314159265dd8dbb310642f98f50c066173c1259b.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x314159265dd8dbb310642f98f50c066173c1259b.json
rename to hemera_udf/hemera_ens/abi/0x314159265dd8dbb310642f98f50c066173c1259b.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x323a76393544d5ecca80cd6ef2a560c6a395b7e3.json b/hemera_udf/hemera_ens/abi/0x323a76393544d5ecca80cd6ef2a560c6a395b7e3.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x323a76393544d5ecca80cd6ef2a560c6a395b7e3.json
rename to hemera_udf/hemera_ens/abi/0x323a76393544d5ecca80cd6ef2a560c6a395b7e3.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x3671ae578e63fdf66ad4f3e12cc0c0d71ac7510c.json b/hemera_udf/hemera_ens/abi/0x3671ae578e63fdf66ad4f3e12cc0c0d71ac7510c.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x3671ae578e63fdf66ad4f3e12cc0c0d71ac7510c.json
rename to hemera_udf/hemera_ens/abi/0x3671ae578e63fdf66ad4f3e12cc0c0d71ac7510c.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x4976fb03c32e5b8cfe2b6ccb31c09ba78ebaba41.json b/hemera_udf/hemera_ens/abi/0x4976fb03c32e5b8cfe2b6ccb31c09ba78ebaba41.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x4976fb03c32e5b8cfe2b6ccb31c09ba78ebaba41.json
rename to hemera_udf/hemera_ens/abi/0x4976fb03c32e5b8cfe2b6ccb31c09ba78ebaba41.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x4fe4e666be5752f1fdd210f4ab5de2cc26e3e0e8.json b/hemera_udf/hemera_ens/abi/0x4fe4e666be5752f1fdd210f4ab5de2cc26e3e0e8.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x4fe4e666be5752f1fdd210f4ab5de2cc26e3e0e8.json
rename to hemera_udf/hemera_ens/abi/0x4fe4e666be5752f1fdd210f4ab5de2cc26e3e0e8.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x57f1887a8bf19b14fc0df6fd9b2acc9af147ea85.json b/hemera_udf/hemera_ens/abi/0x57f1887a8bf19b14fc0df6fd9b2acc9af147ea85.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x57f1887a8bf19b14fc0df6fd9b2acc9af147ea85.json
rename to hemera_udf/hemera_ens/abi/0x57f1887a8bf19b14fc0df6fd9b2acc9af147ea85.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x6090a6e47849629b7245dfa1ca21d94cd15878ef.json b/hemera_udf/hemera_ens/abi/0x6090a6e47849629b7245dfa1ca21d94cd15878ef.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x6090a6e47849629b7245dfa1ca21d94cd15878ef.json
rename to hemera_udf/hemera_ens/abi/0x6090a6e47849629b7245dfa1ca21d94cd15878ef.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x60c7c2a24b5e86c38639fd1586917a8fef66a56d.json b/hemera_udf/hemera_ens/abi/0x60c7c2a24b5e86c38639fd1586917a8fef66a56d.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x60c7c2a24b5e86c38639fd1586917a8fef66a56d.json
rename to hemera_udf/hemera_ens/abi/0x60c7c2a24b5e86c38639fd1586917a8fef66a56d.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x6109dd117aa5486605fc85e040ab00163a75c662.json b/hemera_udf/hemera_ens/abi/0x6109dd117aa5486605fc85e040ab00163a75c662.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x6109dd117aa5486605fc85e040ab00163a75c662.json
rename to hemera_udf/hemera_ens/abi/0x6109dd117aa5486605fc85e040ab00163a75c662.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x690f0581ececcf8389c223170778cd9d029606f2.json b/hemera_udf/hemera_ens/abi/0x690f0581ececcf8389c223170778cd9d029606f2.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x690f0581ececcf8389c223170778cd9d029606f2.json
rename to hemera_udf/hemera_ens/abi/0x690f0581ececcf8389c223170778cd9d029606f2.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x705bfbcfccde554e11df213bf6d463ea00dd57cc.json b/hemera_udf/hemera_ens/abi/0x705bfbcfccde554e11df213bf6d463ea00dd57cc.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x705bfbcfccde554e11df213bf6d463ea00dd57cc.json
rename to hemera_udf/hemera_ens/abi/0x705bfbcfccde554e11df213bf6d463ea00dd57cc.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x9062c0a6dbd6108336bcbe4593a3d1ce05512069.json b/hemera_udf/hemera_ens/abi/0x9062c0a6dbd6108336bcbe4593a3d1ce05512069.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x9062c0a6dbd6108336bcbe4593a3d1ce05512069.json
rename to hemera_udf/hemera_ens/abi/0x9062c0a6dbd6108336bcbe4593a3d1ce05512069.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x911143d946ba5d467bfc476491fdb235fef4d667.json b/hemera_udf/hemera_ens/abi/0x911143d946ba5d467bfc476491fdb235fef4d667.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x911143d946ba5d467bfc476491fdb235fef4d667.json
rename to hemera_udf/hemera_ens/abi/0x911143d946ba5d467bfc476491fdb235fef4d667.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0x9b6d20f524367d7e98ed849d37fc662402dca7fb.json b/hemera_udf/hemera_ens/abi/0x9b6d20f524367d7e98ed849d37fc662402dca7fb.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0x9b6d20f524367d7e98ed849d37fc662402dca7fb.json
rename to hemera_udf/hemera_ens/abi/0x9b6d20f524367d7e98ed849d37fc662402dca7fb.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xa2c122be93b0074270ebee7f6b7292c7deb45047.json b/hemera_udf/hemera_ens/abi/0xa2c122be93b0074270ebee7f6b7292c7deb45047.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xa2c122be93b0074270ebee7f6b7292c7deb45047.json
rename to hemera_udf/hemera_ens/abi/0xa2c122be93b0074270ebee7f6b7292c7deb45047.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xa2f428617a523837d4adc81c67a296d42fd95e86.json b/hemera_udf/hemera_ens/abi/0xa2f428617a523837d4adc81c67a296d42fd95e86.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xa2f428617a523837d4adc81c67a296d42fd95e86.json
rename to hemera_udf/hemera_ens/abi/0xa2f428617a523837d4adc81c67a296d42fd95e86.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xa58e81fe9b61b5c3fe2afd33cf304c454abfc7cb.json b/hemera_udf/hemera_ens/abi/0xa58e81fe9b61b5c3fe2afd33cf304c454abfc7cb.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xa58e81fe9b61b5c3fe2afd33cf304c454abfc7cb.json
rename to hemera_udf/hemera_ens/abi/0xa58e81fe9b61b5c3fe2afd33cf304c454abfc7cb.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xab528d626ec275e3fad363ff1393a41f581c5897.json b/hemera_udf/hemera_ens/abi/0xab528d626ec275e3fad363ff1393a41f581c5897.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xab528d626ec275e3fad363ff1393a41f581c5897.json
rename to hemera_udf/hemera_ens/abi/0xab528d626ec275e3fad363ff1393a41f581c5897.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xb1377e4f32e6746444970823d5506f98f5a04201.json b/hemera_udf/hemera_ens/abi/0xb1377e4f32e6746444970823d5506f98f5a04201.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xb1377e4f32e6746444970823d5506f98f5a04201.json
rename to hemera_udf/hemera_ens/abi/0xb1377e4f32e6746444970823d5506f98f5a04201.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xb6e040c9ecaae172a89bd561c5f73e1c48d28cd9.json b/hemera_udf/hemera_ens/abi/0xb6e040c9ecaae172a89bd561c5f73e1c48d28cd9.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xb6e040c9ecaae172a89bd561c5f73e1c48d28cd9.json
rename to hemera_udf/hemera_ens/abi/0xb6e040c9ecaae172a89bd561c5f73e1c48d28cd9.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xb9d374d0fe3d8341155663fae31b7beae0ae233a.json b/hemera_udf/hemera_ens/abi/0xb9d374d0fe3d8341155663fae31b7beae0ae233a.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xb9d374d0fe3d8341155663fae31b7beae0ae233a.json
rename to hemera_udf/hemera_ens/abi/0xb9d374d0fe3d8341155663fae31b7beae0ae233a.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xc1735677a60884abbcf72295e88d47764beda282.json b/hemera_udf/hemera_ens/abi/0xc1735677a60884abbcf72295e88d47764beda282.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xc1735677a60884abbcf72295e88d47764beda282.json
rename to hemera_udf/hemera_ens/abi/0xc1735677a60884abbcf72295e88d47764beda282.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xc32659651d137a18b79925449722855aa327231d.json b/hemera_udf/hemera_ens/abi/0xc32659651d137a18b79925449722855aa327231d.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xc32659651d137a18b79925449722855aa327231d.json
rename to hemera_udf/hemera_ens/abi/0xc32659651d137a18b79925449722855aa327231d.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xd4416b13d2b3a9abae7acd5d6c2bbdbe25686401.json b/hemera_udf/hemera_ens/abi/0xd4416b13d2b3a9abae7acd5d6c2bbdbe25686401.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xd4416b13d2b3a9abae7acd5d6c2bbdbe25686401.json
rename to hemera_udf/hemera_ens/abi/0xd4416b13d2b3a9abae7acd5d6c2bbdbe25686401.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xd7a029db2585553978190db5e85ec724aa4df23f.json b/hemera_udf/hemera_ens/abi/0xd7a029db2585553978190db5e85ec724aa4df23f.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xd7a029db2585553978190db5e85ec724aa4df23f.json
rename to hemera_udf/hemera_ens/abi/0xd7a029db2585553978190db5e85ec724aa4df23f.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xdaaf96c344f63131acadd0ea35170e7892d3dfba.json b/hemera_udf/hemera_ens/abi/0xdaaf96c344f63131acadd0ea35170e7892d3dfba.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xdaaf96c344f63131acadd0ea35170e7892d3dfba.json
rename to hemera_udf/hemera_ens/abi/0xdaaf96c344f63131acadd0ea35170e7892d3dfba.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xe65d8aaf34cb91087d1598e0a15b582f57f217d9.json b/hemera_udf/hemera_ens/abi/0xe65d8aaf34cb91087d1598e0a15b582f57f217d9.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xe65d8aaf34cb91087d1598e0a15b582f57f217d9.json
rename to hemera_udf/hemera_ens/abi/0xe65d8aaf34cb91087d1598e0a15b582f57f217d9.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xf0ad5cad05e10572efceb849f6ff0c68f9700455.json b/hemera_udf/hemera_ens/abi/0xf0ad5cad05e10572efceb849f6ff0c68f9700455.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xf0ad5cad05e10572efceb849f6ff0c68f9700455.json
rename to hemera_udf/hemera_ens/abi/0xf0ad5cad05e10572efceb849f6ff0c68f9700455.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xf7c83bd0c50e7a72b55a39fe0dabf5e3a330d749.json b/hemera_udf/hemera_ens/abi/0xf7c83bd0c50e7a72b55a39fe0dabf5e3a330d749.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xf7c83bd0c50e7a72b55a39fe0dabf5e3a330d749.json
rename to hemera_udf/hemera_ens/abi/0xf7c83bd0c50e7a72b55a39fe0dabf5e3a330d749.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xfe89cc7abb2c4183683ab71653c4cdc9b02d44b7.json b/hemera_udf/hemera_ens/abi/0xfe89cc7abb2c4183683ab71653c4cdc9b02d44b7.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xfe89cc7abb2c4183683ab71653c4cdc9b02d44b7.json
rename to hemera_udf/hemera_ens/abi/0xfe89cc7abb2c4183683ab71653c4cdc9b02d44b7.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xff252725f6122a92551a5fa9a6b6bf10eb0be035.json b/hemera_udf/hemera_ens/abi/0xff252725f6122a92551a5fa9a6b6bf10eb0be035.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xff252725f6122a92551a5fa9a6b6bf10eb0be035.json
rename to hemera_udf/hemera_ens/abi/0xff252725f6122a92551a5fa9a6b6bf10eb0be035.json
diff --git a/indexer/modules/custom/hemera_ens/abi/0xffc8ca4e83416b7e0443ff430cc245646434b647.json b/hemera_udf/hemera_ens/abi/0xffc8ca4e83416b7e0443ff430cc245646434b647.json
similarity index 100%
rename from indexer/modules/custom/hemera_ens/abi/0xffc8ca4e83416b7e0443ff430cc245646434b647.json
rename to hemera_udf/hemera_ens/abi/0xffc8ca4e83416b7e0443ff430cc245646434b647.json
diff --git a/indexer/modules/custom/hemera_ens/endpoint/__init__.py b/hemera_udf/hemera_ens/endpoint/__init__.py
similarity index 100%
rename from indexer/modules/custom/hemera_ens/endpoint/__init__.py
rename to hemera_udf/hemera_ens/endpoint/__init__.py
diff --git a/indexer/modules/custom/hemera_ens/endpoint/action_types.py b/hemera_udf/hemera_ens/endpoint/action_types.py
similarity index 100%
rename from indexer/modules/custom/hemera_ens/endpoint/action_types.py
rename to hemera_udf/hemera_ens/endpoint/action_types.py
diff --git a/indexer/modules/custom/hemera_ens/endpoint/routes.py b/hemera_udf/hemera_ens/endpoint/routes.py
similarity index 91%
rename from indexer/modules/custom/hemera_ens/endpoint/routes.py
rename to hemera_udf/hemera_ens/endpoint/routes.py
index edd7684e7..9868ef1cf 100644
--- a/indexer/modules/custom/hemera_ens/endpoint/routes.py
+++ b/hemera_udf/hemera_ens/endpoint/routes.py
@@ -8,20 +8,20 @@
from sqlalchemy.sql import and_, or_
from web3 import Web3
-from api.app.address.features import register_feature
-from common.models import db
-from common.models.current_token_balances import CurrentTokenBalances
-from common.models.erc721_token_id_details import ERC721TokenIdDetails
-from common.models.erc721_token_transfers import ERC721TokenTransfers
-from common.models.erc1155_token_transfers import ERC1155TokenTransfers
-from common.utils.config import get_config
-from common.utils.exception_control import APIError
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.hemera_ens.endpoint import af_ens_namespace
-from indexer.modules.custom.hemera_ens.endpoint.action_types import OperationType
-from indexer.modules.custom.hemera_ens.models.af_ens_address_current import ENSAddress
-from indexer.modules.custom.hemera_ens.models.af_ens_event import ENSMiddle
-from indexer.modules.custom.hemera_ens.models.af_ens_node_current import ENSRecord
+from hemera.api.app.address.features import register_feature
+from hemera.common.models import db
+from hemera.common.models.current_token_balances import CurrentTokenBalances
+from hemera.common.models.erc721_token_id_details import ERC721TokenIdDetails
+from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers
+from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers
+from hemera.common.utils.config import get_config
+from hemera.common.utils.exception_control import APIError
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.hemera_ens.endpoint import af_ens_namespace
+from hemera_udf.hemera_ens.endpoint.action_types import OperationType
+from hemera_udf.hemera_ens.models.af_ens_address_current import ENSAddress
+from hemera_udf.hemera_ens.models.af_ens_event import ENSMiddle
+from hemera_udf.hemera_ens.models.af_ens_node_current import ENSRecord
app_config = get_config()
diff --git a/hemera_udf/hemera_ens/ens_abi.py b/hemera_udf/hemera_ens/ens_abi.py
new file mode 100644
index 000000000..af30b9391
--- /dev/null
+++ b/hemera_udf/hemera_ens/ens_abi.py
@@ -0,0 +1,33 @@
+import json
+import os
+
+
+def load_abi_from_directory(relative_path):
+ """
+ Load ABI files from the specified relative directory and build an abi_map.
+
+ :param relative_path: The relative path to the directory containing ABI files.
+ :return: A dictionary mapping addresses (lowercased) to their ABI JSON strings.
+ """
+ abi_map = {}
+ # Get the absolute path of the directory
+ current_dir = os.path.dirname(os.path.abspath(__file__))
+ absolute_path = os.path.join(current_dir, relative_path)
+
+ # Iterate through all files in the directory
+ for file_name in os.listdir(absolute_path):
+ file_path = os.path.join(absolute_path, file_name)
+ # Process only files, skip directories
+ if os.path.isfile(file_path):
+ with open(file_path, "r") as data_file:
+ try:
+ data = json.load(data_file)
+ abi_map[data["address"].lower()] = json.dumps(data["abi"])
+ except (KeyError, json.JSONDecodeError) as e:
+ print(f"Error processing file {file_name}: {e}")
+
+ return abi_map
+
+
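+# Build the address -> ABI map eagerly at import time from the bundled "abi" directory.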
+relative_path = "abi"
+abi_map = load_abi_from_directory(relative_path)
diff --git a/indexer/modules/custom/hemera_ens/__init__.py b/hemera_udf/hemera_ens/ens_compatible_string_decoder.py
similarity index 89%
rename from indexer/modules/custom/hemera_ens/__init__.py
rename to hemera_udf/hemera_ens/ens_compatible_string_decoder.py
index cb718cbcf..91ada9710 100644
--- a/indexer/modules/custom/hemera_ens/__init__.py
+++ b/hemera_udf/hemera_ens/ens_compatible_string_decoder.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import codecs
from eth_abi import encoding
@@ -27,7 +29,3 @@ def decoder_fn(data, handle_string_errors="strict"):
CompatibleStringDecoder,
label="string",
)
-
-
-from .ens_conf import CONTRACT_NAME_MAP
-from .ens_handler import EnsConfLoader, EnsHandler
diff --git a/indexer/modules/custom/hemera_ens/ens_conf.py b/hemera_udf/hemera_ens/ens_conf.py
similarity index 100%
rename from indexer/modules/custom/hemera_ens/ens_conf.py
rename to hemera_udf/hemera_ens/ens_conf.py
diff --git a/indexer/modules/custom/hemera_ens/ens_domain.py b/hemera_udf/hemera_ens/ens_domain.py
similarity index 90%
rename from indexer/modules/custom/hemera_ens/ens_domain.py
rename to hemera_udf/hemera_ens/ens_domain.py
index 213cb905a..9447537ca 100644
--- a/indexer/modules/custom/hemera_ens/ens_domain.py
+++ b/hemera_udf/hemera_ens/ens_domain.py
@@ -2,13 +2,13 @@
from datetime import datetime
from typing import Optional
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
"""for ens_middle"""
@dataclass
-class ENSMiddleD(FilterData):
+class ENSMiddleD(Domain):
transaction_hash: str
log_index: int
transaction_index: int
@@ -45,7 +45,7 @@ class ENSMiddleD(FilterData):
@dataclass
-class ENSRegisterD(FilterData):
+class ENSRegisterD(Domain):
registration: Optional[datetime] = None
expires: Optional[datetime] = None
@@ -60,7 +60,7 @@ class ENSRegisterD(FilterData):
@dataclass
-class ENSNameRenewD(FilterData):
+class ENSNameRenewD(Domain):
node: Optional[str] = None
expires: Optional[datetime] = None
@@ -68,7 +68,7 @@ class ENSNameRenewD(FilterData):
@dataclass
-class ENSAddressChangeD(FilterData):
+class ENSAddressChangeD(Domain):
node: Optional[str] = None
address: Optional[str] = None
@@ -79,7 +79,7 @@ class ENSAddressChangeD(FilterData):
@dataclass
-class ENSAddressD(FilterData):
+class ENSAddressD(Domain):
address: Optional[str] = None
reverse_node: Optional[str] = None
diff --git a/indexer/modules/custom/hemera_ens/ens_handler.py b/hemera_udf/hemera_ens/ens_handler.py
similarity index 95%
rename from indexer/modules/custom/hemera_ens/ens_handler.py
rename to hemera_udf/hemera_ens/ens_handler.py
index eb18c44b4..18ed48bc1 100644
--- a/indexer/modules/custom/hemera_ens/ens_handler.py
+++ b/hemera_udf/hemera_ens/ens_handler.py
@@ -13,19 +13,13 @@
from eth_abi.codec import ABICodec
from web3 import Web3
-from indexer.modules.custom.hemera_ens import lifo_registry
-from indexer.modules.custom.hemera_ens.ens_abi import abi_map
-from indexer.modules.custom.hemera_ens.ens_conf import CONTRACT_NAME_MAP, ENS_CONTRACT_CREATED_BLOCK, REVERSE_BASE_NODE
-from indexer.modules.custom.hemera_ens.ens_domain import (
- ENSAddressChangeD,
- ENSAddressD,
- ENSMiddleD,
- ENSNameRenewD,
- ENSRegisterD,
-)
-from indexer.modules.custom.hemera_ens.ens_hash import namehash
-from indexer.modules.custom.hemera_ens.extractors import BaseExtractor, RegisterExtractor
-from indexer.modules.custom.hemera_ens.util import convert_str_ts
+from hemera_udf.hemera_ens.ens_abi import abi_map
+from hemera_udf.hemera_ens.ens_compatible_string_decoder import lifo_registry
+from hemera_udf.hemera_ens.ens_conf import CONTRACT_NAME_MAP, ENS_CONTRACT_CREATED_BLOCK, REVERSE_BASE_NODE
+from hemera_udf.hemera_ens.ens_domain import ENSAddressChangeD, ENSAddressD, ENSMiddleD, ENSNameRenewD, ENSRegisterD
+from hemera_udf.hemera_ens.ens_hash import namehash
+from hemera_udf.hemera_ens.extractors import BaseExtractor, RegisterExtractor
+from hemera_udf.hemera_ens.util import convert_str_ts
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/hemera_ens/ens_hash.py b/hemera_udf/hemera_ens/ens_hash.py
similarity index 100%
rename from indexer/modules/custom/hemera_ens/ens_hash.py
rename to hemera_udf/hemera_ens/ens_hash.py
diff --git a/indexer/modules/custom/hemera_ens/extractors.py b/hemera_udf/hemera_ens/extractors.py
similarity index 97%
rename from indexer/modules/custom/hemera_ens/extractors.py
rename to hemera_udf/hemera_ens/extractors.py
index 77dbe3a12..9a8b2db4c 100644
--- a/indexer/modules/custom/hemera_ens/extractors.py
+++ b/hemera_udf/hemera_ens/extractors.py
@@ -8,11 +8,11 @@
from web3 import Web3
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.modules.custom.hemera_ens.ens_conf import BASE_NODE, REVERSE_BASE_NODE
-from indexer.modules.custom.hemera_ens.ens_domain import ENSMiddleD
-from indexer.modules.custom.hemera_ens.ens_hash import compute_node_label, namehash
-from indexer.modules.custom.hemera_ens.util import convert_str_ts
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera_udf.hemera_ens.ens_conf import BASE_NODE, REVERSE_BASE_NODE
+from hemera_udf.hemera_ens.ens_domain import ENSMiddleD
+from hemera_udf.hemera_ens.ens_hash import compute_node_label, namehash
+from hemera_udf.hemera_ens.util import convert_str_ts
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/init_capital/models/__init__.py b/hemera_udf/hemera_ens/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/init_capital/models/__init__.py
rename to hemera_udf/hemera_ens/jobs/__init__.py
diff --git a/indexer/modules/custom/hemera_ens/export_ens_job.py b/hemera_udf/hemera_ens/jobs/export_ens_job.py
similarity index 84%
rename from indexer/modules/custom/hemera_ens/export_ens_job.py
rename to hemera_udf/hemera_ens/jobs/export_ens_job.py
index cd255ccdd..e474b5d58 100644
--- a/indexer/modules/custom/hemera_ens/export_ens_job.py
+++ b/hemera_udf/hemera_ens/jobs/export_ens_job.py
@@ -9,26 +9,21 @@
from dataclasses import asdict, fields, is_dataclass
from typing import Any, Dict, List
-from common.utils.exception_control import FastShutdownError
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.hemera_ens import CONTRACT_NAME_MAP, EnsConfLoader, EnsHandler
-from indexer.modules.custom.hemera_ens.ens_domain import (
- ENSAddressChangeD,
- ENSAddressD,
- ENSMiddleD,
- ENSNameRenewD,
- ENSRegisterD,
-)
-from indexer.modules.custom.hemera_ens.extractors import BaseExtractor
-from indexer.specification.specification import (
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import (
ToAddressSpecification,
TopicSpecification,
TransactionFilterByLogs,
TransactionFilterByTransactionInfo,
)
+from hemera_udf.hemera_ens.ens_conf import CONTRACT_NAME_MAP
+from hemera_udf.hemera_ens.ens_domain import ENSAddressChangeD, ENSAddressD, ENSMiddleD, ENSNameRenewD, ENSRegisterD
+from hemera_udf.hemera_ens.ens_handler import EnsConfLoader, EnsHandler
+from hemera_udf.hemera_ens.extractors import BaseExtractor
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/hemera_ens/models/__init__.py b/hemera_udf/hemera_ens/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/hemera_ens/models/__init__.py
rename to hemera_udf/hemera_ens/models/__init__.py
diff --git a/indexer/modules/custom/hemera_ens/models/af_ens_address_current.py b/hemera_udf/hemera_ens/models/af_ens_address_current.py
similarity index 80%
rename from indexer/modules/custom/hemera_ens/models/af_ens_address_current.py
rename to hemera_udf/hemera_ens/models/af_ens_address_current.py
index 72f7460e1..4b5915456 100644
--- a/indexer/modules/custom/hemera_ens/models/af_ens_address_current.py
+++ b/hemera_udf/hemera_ens/models/af_ens_address_current.py
@@ -1,9 +1,9 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
-from indexer.modules.custom.hemera_ens.ens_domain import ENSAddressD
-from indexer.modules.custom.hemera_ens.models.af_ens_node_current import ens_general_converter
+from hemera.common.models import HemeraModel
+from hemera_udf.hemera_ens.ens_domain import ENSAddressD
+from hemera_udf.hemera_ens.models.af_ens_node_current import ens_general_converter
class ENSAddress(HemeraModel):
diff --git a/indexer/modules/custom/hemera_ens/models/af_ens_event.py b/hemera_udf/hemera_ens/models/af_ens_event.py
similarity index 90%
rename from indexer/modules/custom/hemera_ens/models/af_ens_event.py
rename to hemera_udf/hemera_ens/models/af_ens_event.py
index c16811930..1de3358d1 100644
--- a/indexer/modules/custom/hemera_ens/models/af_ens_event.py
+++ b/hemera_udf/hemera_ens/models/af_ens_event.py
@@ -1,9 +1,9 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
-from indexer.modules.custom.hemera_ens.ens_domain import ENSMiddleD
-from indexer.modules.custom.hemera_ens.models.af_ens_node_current import ens_general_converter
+from hemera.common.models import HemeraModel
+from hemera_udf.hemera_ens.ens_domain import ENSMiddleD
+from hemera_udf.hemera_ens.models.af_ens_node_current import ens_general_converter
class ENSMiddle(HemeraModel):
diff --git a/indexer/modules/custom/hemera_ens/models/af_ens_node_current.py b/hemera_udf/hemera_ens/models/af_ens_node_current.py
similarity index 94%
rename from indexer/modules/custom/hemera_ens/models/af_ens_node_current.py
rename to hemera_udf/hemera_ens/models/af_ens_node_current.py
index 9c95d1226..83b26c846 100644
--- a/indexer/modules/custom/hemera_ens/models/af_ens_node_current.py
+++ b/hemera_udf/hemera_ens/models/af_ens_node_current.py
@@ -6,10 +6,10 @@
from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BYTEA, JSONB, NUMERIC, TIMESTAMP, VARCHAR
from sqlalchemy.sql import func
-from common.models import HemeraModel, get_column_type
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.domain import Domain
-from indexer.modules.custom.hemera_ens.ens_domain import ENSAddressChangeD, ENSNameRenewD, ENSRegisterD
+from hemera.common.models import HemeraModel, get_column_type
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.indexer.domains import Domain
+from hemera_udf.hemera_ens.ens_domain import ENSAddressChangeD, ENSNameRenewD, ENSRegisterD
def ens_general_converter(table: Type[HemeraModel], data: Domain, is_update=False):
diff --git a/indexer/modules/custom/hemera_ens/util.py b/hemera_udf/hemera_ens/util.py
similarity index 100%
rename from indexer/modules/custom/hemera_ens/util.py
rename to hemera_udf/hemera_ens/util.py
diff --git a/hemera_udf/init_capital/__init__.py b/hemera_udf/init_capital/__init__.py
new file mode 100644
index 000000000..f383874cc
--- /dev/null
+++ b/hemera_udf/init_capital/__init__.py
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.init_capital.domains import (
+ InitCapitalPoolHistoryDomain,
+ InitCapitalPoolUpdateDomain,
+ InitCapitalPositionCreateDomain,
+ InitCapitalPositionHistoryDomain,
+ InitCapitalPositionUpdateDomain,
+ InitCapitalRecordDomain,
+)
+
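+# Compatibility gate: base_version strips pre-release/dev suffixes, so only the
+# release segment of the installed Hemera version is compared against 1.0.0.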
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
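+# Register the dynamic "INIT_CAPITAL" entity type and declare the domains it outputs.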
+value = DynamicEntityTypeRegistry.register("INIT_CAPITAL")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ InitCapitalPoolHistoryDomain,
+ InitCapitalPoolUpdateDomain,
+ InitCapitalPositionCreateDomain,
+ InitCapitalPositionHistoryDomain,
+ InitCapitalPositionUpdateDomain,
+ InitCapitalRecordDomain,
+ },
+)
diff --git a/indexer/modules/custom/init_capital/abi.py b/hemera_udf/init_capital/abi.py
similarity index 99%
rename from indexer/modules/custom/init_capital/abi.py
rename to hemera_udf/init_capital/abi.py
index 2da48ef4e..830f1b45f 100644
--- a/indexer/modules/custom/init_capital/abi.py
+++ b/hemera_udf/init_capital/abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
INIT_BORROW_EVENT = Event(
{
diff --git a/indexer/modules/custom/init_capital/domains/init_capital_domains.py b/hemera_udf/init_capital/domains.py
similarity index 78%
rename from indexer/modules/custom/init_capital/domains/init_capital_domains.py
rename to hemera_udf/init_capital/domains.py
index 208198759..b0ee5e9a0 100644
--- a/indexer/modules/custom/init_capital/domains/init_capital_domains.py
+++ b/hemera_udf/init_capital/domains.py
@@ -1,11 +1,10 @@
from dataclasses import dataclass
-from datetime import datetime
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class InitCapitalPositionHistoryDomain(FilterData):
+class InitCapitalPositionHistoryDomain(Domain):
position_id: int
owner_address: str
viewer_address: str
@@ -19,7 +18,7 @@ class InitCapitalPositionHistoryDomain(FilterData):
@dataclass
-class InitCapitalPositionCreateDomain(FilterData):
+class InitCapitalPositionCreateDomain(Domain):
position_id: int
owner_address: str
viewer_address: str
@@ -35,7 +34,7 @@ class InitCapitalPositionCreateDomain(FilterData):
@dataclass
-class InitCapitalPositionUpdateDomain(FilterData):
+class InitCapitalPositionUpdateDomain(Domain):
position_id: int
owner_address: str
viewer_address: str
@@ -49,7 +48,7 @@ class InitCapitalPositionUpdateDomain(FilterData):
@dataclass
-class InitCapitalRecordDomain(FilterData):
+class InitCapitalRecordDomain(Domain):
action_type: str
position_id: int
pool_address: str
@@ -65,7 +64,7 @@ class InitCapitalRecordDomain(FilterData):
@dataclass
-class InitCapitalPoolHistoryDomain(FilterData):
+class InitCapitalPoolHistoryDomain(Domain):
pool_address: str
token_address: str
@@ -79,7 +78,7 @@ class InitCapitalPoolHistoryDomain(FilterData):
@dataclass
-class InitCapitalPoolUpdateDomain(FilterData):
+class InitCapitalPoolUpdateDomain(Domain):
pool_address: str
total_asset: int
diff --git a/indexer/modules/custom/init_capital/endpoints/__init__.py b/hemera_udf/init_capital/endpoints/__init__.py
similarity index 100%
rename from indexer/modules/custom/init_capital/endpoints/__init__.py
rename to hemera_udf/init_capital/endpoints/__init__.py
diff --git a/indexer/modules/custom/init_capital/endpoints/routes.py b/hemera_udf/init_capital/endpoints/routes.py
similarity index 90%
rename from indexer/modules/custom/init_capital/endpoints/routes.py
rename to hemera_udf/init_capital/endpoints/routes.py
index 3c0b7bacc..75c740f0c 100644
--- a/indexer/modules/custom/init_capital/endpoints/routes.py
+++ b/hemera_udf/init_capital/endpoints/routes.py
@@ -2,12 +2,12 @@
from flask_restx import Resource
-from api.app.address.features import register_feature
-from api.app.cache import cache
-from common.models import db
-from common.utils.format_utils import as_dict, bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.init_capital.endpoints import init_capital_namespace
-from indexer.modules.custom.init_capital.models.init_capital_models import (
+from hemera.api.app.address.features import register_feature
+from hemera.api.app.cache import cache
+from hemera.common.models import db
+from hemera.common.utils.format_utils import as_dict, bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.init_capital.endpoints import init_capital_namespace
+from hemera_udf.init_capital.models.init_capital_models import (
InitCapitalPoolCurrent,
InitCapitalPositionCurrent,
InitCapitalRecords,
diff --git a/indexer/modules/custom/lido/__init__.py b/hemera_udf/init_capital/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/lido/__init__.py
rename to hemera_udf/init_capital/jobs/__init__.py
diff --git a/indexer/modules/custom/init_capital/export_init_capital_job.py b/hemera_udf/init_capital/jobs/export_init_capital_job.py
similarity index 96%
rename from indexer/modules/custom/init_capital/export_init_capital_job.py
rename to hemera_udf/init_capital/jobs/export_init_capital_job.py
index fca4804c3..879d78138 100644
--- a/indexer/modules/custom/init_capital/export_init_capital_job.py
+++ b/hemera_udf/init_capital/jobs/export_init_capital_job.py
@@ -3,18 +3,20 @@
import orjson
# Utility
-from common.utils.abi_code_utils import decode_data, decode_log, encode_data
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.common.utils.abi_code_utils import decode_data, decode_log
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
# Dependency dataclass
-from indexer.domain.log import Log
-from indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.domains.log import Log
# Job
-from indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.json_rpc_requests import generate_eth_call_json_rpc
+from hemera.indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
# Custom dataclass
-from indexer.modules.custom.init_capital.abi import (
+from hemera_udf.init_capital.abi import (
INIT_BORROW_EVENT,
INIT_COLLATERALIZE_EVENT,
INIT_CREATE_POSITION_EVENT,
@@ -26,7 +28,7 @@
INIT_POOL_TOTAL_SUPPLY_FUNCTION,
INIT_REPAY_EVENT,
)
-from indexer.modules.custom.init_capital.domains.init_capital_domains import (
+from hemera_udf.init_capital.domains import (
InitCapitalPoolHistoryDomain,
InitCapitalPoolUpdateDomain,
InitCapitalPositionCreateDomain,
@@ -34,13 +36,7 @@
InitCapitalPositionUpdateDomain,
InitCapitalRecordDomain,
)
-from indexer.modules.custom.init_capital.models.init_capital_models import (
- InitCapitalPoolCurrent,
- InitCapitalPositionCurrent,
-)
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.json_rpc_requests import generate_eth_call_json_rpc
-from indexer.utils.rpc_utils import rpc_response_to_result, zip_rpc_response
+from hemera_udf.init_capital.models.init_capital_models import InitCapitalPoolCurrent, InitCapitalPositionCurrent
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/lido/abi/__init__.py b/hemera_udf/init_capital/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/lido/abi/__init__.py
rename to hemera_udf/init_capital/models/__init__.py
diff --git a/indexer/modules/custom/init_capital/models/init_capital_models.py b/hemera_udf/init_capital/models/init_capital_models.py
similarity index 89%
rename from indexer/modules/custom/init_capital/models/init_capital_models.py
rename to hemera_udf/init_capital/models/init_capital_models.py
index 507ce7cbd..ef356b5b2 100644
--- a/indexer/modules/custom/init_capital/models/init_capital_models.py
+++ b/hemera_udf/init_capital/models/init_capital_models.py
@@ -1,7 +1,15 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, INTEGER, JSONB, NUMERIC, SMALLINT, TIMESTAMP
-from common.models import HemeraModel, general_converter
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.init_capital import (
+ InitCapitalPoolHistoryDomain,
+ InitCapitalPoolUpdateDomain,
+ InitCapitalPositionCreateDomain,
+ InitCapitalPositionHistoryDomain,
+ InitCapitalPositionUpdateDomain,
+ InitCapitalRecordDomain,
+)
class InitCapitalPositionHistory(HemeraModel):
@@ -25,7 +33,7 @@ class InitCapitalPositionHistory(HemeraModel):
def model_domain_mapping():
return [
{
- "domain": "InitCapitalPositionHistoryDomain",
+ "domain": InitCapitalPositionHistoryDomain,
"conflict_do_update": True,
"update_strategy": None,
"converter": general_converter,
@@ -59,13 +67,13 @@ class InitCapitalPositionCurrent(HemeraModel):
def model_domain_mapping():
return [
{
- "domain": "InitCapitalPositionCreateDomain",
+ "domain": InitCapitalPositionCreateDomain,
"conflict_do_update": True,
"update_strategy": "EXCLUDED.block_number >= init_capital_position_current.block_number",
"converter": general_converter,
},
{
- "domain": "InitCapitalPositionUpdateDomain",
+ "domain": InitCapitalPositionUpdateDomain,
"conflict_do_update": True,
"update_strategy": "EXCLUDED.block_number >= init_capital_position_current.block_number",
"converter": general_converter,
@@ -97,7 +105,7 @@ class InitCapitalRecords(HemeraModel):
def model_domain_mapping():
return [
{
- "domain": "InitCapitalRecordDomain",
+ "domain": InitCapitalRecordDomain,
"conflict_do_update": True,
"update_strategy": None,
"converter": general_converter,
@@ -138,7 +146,7 @@ class InitCapitalPoolsHistory(HemeraModel):
def model_domain_mapping():
return [
{
- "domain": "InitCapitalPoolHistoryDomain",
+ "domain": InitCapitalPoolHistoryDomain,
"conflict_do_update": True,
"update_strategy": None,
"converter": general_converter,
@@ -166,7 +174,7 @@ class InitCapitalPoolCurrent(HemeraModel):
def model_domain_mapping():
return [
{
- "domain": "InitCapitalPoolUpdateDomain",
+ "domain": InitCapitalPoolUpdateDomain,
"conflict_do_update": True,
"update_strategy": "EXCLUDED.block_number >= init_capital_pool_current.block_number OR init_capital_pool_current.block_number IS NULL",
"converter": general_converter,
diff --git a/hemera_udf/karak/__init__.py b/hemera_udf/karak/__init__.py
new file mode 100644
index 000000000..f64ebe687
--- /dev/null
+++ b/hemera_udf/karak/__init__.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/9/19 15:18
+# @Author will
+# @File __init__.py
+# @Brief
+"""Currently, this job only support Deposit, StartWithDraw, FinishWithDraw, more events coming soon"""
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.karak.domains import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
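+# Expose the Karak domains under the dynamic "KARAK" entity type.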
+value = DynamicEntityTypeRegistry.register("KARAK")
+DynamicEntityTypeRegistry.register_output_types(value, {KarakActionD, KarakVaultTokenD, KarakAddressCurrentD})
diff --git a/indexer/modules/custom/karak/karak_domain.py b/hemera_udf/karak/domains.py
similarity index 90%
rename from indexer/modules/custom/karak/karak_domain.py
rename to hemera_udf/karak/domains.py
index 633b98c14..69e71fafd 100644
--- a/indexer/modules/custom/karak/karak_domain.py
+++ b/hemera_udf/karak/domains.py
@@ -1,11 +1,11 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class KarakActionD(FilterData):
+class KarakActionD(Domain):
transaction_hash: str
log_index: int
transaction_index: int
@@ -29,7 +29,7 @@ class KarakActionD(FilterData):
@dataclass
-class KarakVaultTokenD(FilterData):
+class KarakVaultTokenD(Domain):
vault: Optional[str] = None
token: Optional[str] = None
name: Optional[str] = None
@@ -38,7 +38,7 @@ class KarakVaultTokenD(FilterData):
@dataclass
-class KarakAddressCurrentD(FilterData):
+class KarakAddressCurrentD(Domain):
address: Optional[str] = None
vault: Optional[str] = None
deposit_amount: Optional[int] = None
diff --git a/indexer/modules/custom/karak/endpoints/__init__.py b/hemera_udf/karak/endpoints/__init__.py
similarity index 100%
rename from indexer/modules/custom/karak/endpoints/__init__.py
rename to hemera_udf/karak/endpoints/__init__.py
diff --git a/indexer/modules/custom/karak/endpoints/routes.py b/hemera_udf/karak/endpoints/routes.py
similarity index 100%
rename from indexer/modules/custom/karak/endpoints/routes.py
rename to hemera_udf/karak/endpoints/routes.py
diff --git a/indexer/modules/custom/lido/domains/__init__.py b/hemera_udf/karak/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/lido/domains/__init__.py
rename to hemera_udf/karak/jobs/__init__.py
diff --git a/indexer/modules/custom/karak/export_karak_job.py b/hemera_udf/karak/jobs/export_karak_job.py
similarity index 91%
rename from indexer/modules/custom/karak/export_karak_job.py
rename to hemera_udf/karak/jobs/export_karak_job.py
index 7d256c383..1b2398806 100644
--- a/indexer/modules/custom/karak/export_karak_job.py
+++ b/hemera_udf/karak/jobs/export_karak_job.py
@@ -6,25 +6,20 @@
from eth_typing import Decodable
from sqlalchemy import func
-from common.utils.abi_code_utils import decode_log
-from common.utils.exception_control import FastShutdownError
-from common.utils.format_utils import hex_str_to_bytes
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.hemera_ens.extractors import extract_eth_address
-from indexer.modules.custom.karak.karak_abi import DEPOSIT_EVENT, FINISH_WITHDRAWAL_EVENT, START_WITHDRAWAL_EVENT
-from indexer.modules.custom.karak.karak_conf import CHAIN_CONTRACT
-from indexer.modules.custom.karak.karak_domain import (
- KarakActionD,
- KarakAddressCurrentD,
- KarakVaultTokenD,
- karak_address_current_factory,
-)
-from indexer.modules.custom.karak.models.af_karak_address_current import AfKarakAddressCurrent
-from indexer.modules.custom.karak.models.af_karak_vault_token import AfKarakVaultToken
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.abi import bytes_to_hex_str
+from hemera.common.utils.abi_code_utils import decode_log
+from hemera.common.utils.exception_control import FastShutdownError
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.abi import bytes_to_hex_str
+from hemera_udf.hemera_ens.extractors import extract_eth_address
+from hemera_udf.karak.domains import KarakActionD, KarakAddressCurrentD, KarakVaultTokenD, karak_address_current_factory
+from hemera_udf.karak.karak_abi import DEPOSIT_EVENT, FINISH_WITHDRAWAL_EVENT, START_WITHDRAWAL_EVENT
+from hemera_udf.karak.karak_conf import CHAIN_CONTRACT
+from hemera_udf.karak.models.af_karak_address_current import AfKarakAddressCurrent
+from hemera_udf.karak.models.af_karak_vault_token import AfKarakVaultToken
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/karak/karak_abi.py b/hemera_udf/karak/karak_abi.py
similarity index 100%
rename from indexer/modules/custom/karak/karak_abi.py
rename to hemera_udf/karak/karak_abi.py
diff --git a/indexer/modules/custom/karak/karak_conf.py b/hemera_udf/karak/karak_conf.py
similarity index 100%
rename from indexer/modules/custom/karak/karak_conf.py
rename to hemera_udf/karak/karak_conf.py
diff --git a/indexer/modules/custom/karak/models/__init__.py b/hemera_udf/karak/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/karak/models/__init__.py
rename to hemera_udf/karak/models/__init__.py
diff --git a/indexer/modules/custom/karak/models/af_karak_address_current.py b/hemera_udf/karak/models/af_karak_address_current.py
similarity index 89%
rename from indexer/modules/custom/karak/models/af_karak_address_current.py
rename to hemera_udf/karak/models/af_karak_address_current.py
index 2cc80f62c..02fdd7db5 100644
--- a/indexer/modules/custom/karak/models/af_karak_address_current.py
+++ b/hemera_udf/karak/models/af_karak_address_current.py
@@ -8,8 +8,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.karak.karak_domain import KarakAddressCurrentD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.karak.domains import KarakAddressCurrentD
class AfKarakAddressCurrent(HemeraModel):
diff --git a/indexer/modules/custom/karak/models/af_karak_records.py b/hemera_udf/karak/models/af_karak_records.py
similarity index 91%
rename from indexer/modules/custom/karak/models/af_karak_records.py
rename to hemera_udf/karak/models/af_karak_records.py
index d4cfdc91c..e1ceab115 100644
--- a/indexer/modules/custom/karak/models/af_karak_records.py
+++ b/hemera_udf/karak/models/af_karak_records.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.karak.karak_domain import KarakActionD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.karak.domains import KarakActionD
class AfKarakRecords(HemeraModel):
diff --git a/indexer/modules/custom/karak/models/af_karak_vault_token.py b/hemera_udf/karak/models/af_karak_vault_token.py
similarity index 88%
rename from indexer/modules/custom/karak/models/af_karak_vault_token.py
rename to hemera_udf/karak/models/af_karak_vault_token.py
index 0c32f3904..b6f1af4aa 100644
--- a/indexer/modules/custom/karak/models/af_karak_vault_token.py
+++ b/hemera_udf/karak/models/af_karak_vault_token.py
@@ -7,8 +7,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.karak.karak_domain import KarakVaultTokenD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.karak.domains import KarakVaultTokenD
class AfKarakVaultToken(HemeraModel):
diff --git a/indexer/modules/custom/karak/sql/karak_table_migration.sql b/hemera_udf/karak/sql/karak_table_migration.sql
similarity index 100%
rename from indexer/modules/custom/karak/sql/karak_table_migration.sql
rename to hemera_udf/karak/sql/karak_table_migration.sql
diff --git a/indexer/modules/custom/lido/README.md b/hemera_udf/lido/README.md
similarity index 100%
rename from indexer/modules/custom/lido/README.md
rename to hemera_udf/lido/README.md
diff --git a/hemera_udf/lido/__init__.py b/hemera_udf/lido/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/lido/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/lido/models/__init__.py b/hemera_udf/lido/abi/__init__.py
similarity index 100%
rename from indexer/modules/custom/lido/models/__init__.py
rename to hemera_udf/lido/abi/__init__.py
diff --git a/indexer/modules/custom/lido/abi/event.py b/hemera_udf/lido/abi/event.py
similarity index 98%
rename from indexer/modules/custom/lido/abi/event.py
rename to hemera_udf/lido/abi/event.py
index c53154565..10e8f0462 100644
--- a/indexer/modules/custom/lido/abi/event.py
+++ b/hemera_udf/lido/abi/event.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event
+from hemera.common.utils.abi_code_utils import Event
transfer_share_event = Event(
{
diff --git a/indexer/modules/custom/lido/abi/functions.py b/hemera_udf/lido/abi/functions.py
similarity index 95%
rename from indexer/modules/custom/lido/abi/functions.py
rename to hemera_udf/lido/abi/functions.py
index e75421aa3..d515179f5 100644
--- a/indexer/modules/custom/lido/abi/functions.py
+++ b/hemera_udf/lido/abi/functions.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Function
+from hemera.common.utils.abi_code_utils import Function
get_total_shares_func = Function(
{
diff --git a/indexer/modules/custom/lido/domains/seth.py b/hemera_udf/lido/domains.py
similarity index 91%
rename from indexer/modules/custom/lido/domains/seth.py
rename to hemera_udf/lido/domains.py
index 3d45956d6..c1b3e46bd 100644
--- a/indexer/modules/custom/lido/domains/seth.py
+++ b/hemera_udf/lido/domains.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
diff --git a/indexer/modules/custom/merchant_moe/__init__.py b/hemera_udf/lido/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/merchant_moe/__init__.py
rename to hemera_udf/lido/jobs/__init__.py
diff --git a/indexer/modules/custom/lido/export_lido_share_job.py b/hemera_udf/lido/jobs/export_lido_share_job.py
similarity index 89%
rename from indexer/modules/custom/lido/export_lido_share_job.py
rename to hemera_udf/lido/jobs/export_lido_share_job.py
index c900b6a27..e936b6d4a 100644
--- a/indexer/modules/custom/lido/export_lido_share_job.py
+++ b/hemera_udf/lido/jobs/export_lido_share_job.py
@@ -2,19 +2,17 @@
from itertools import groupby
from typing import List
-from web3 import Web3
-
-from common.utils.web3_utils import ZERO_ADDRESS, event_topic_to_address
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import FilterTransactionDataJob
-from indexer.modules.custom.lido.abi.event import *
-from indexer.modules.custom.lido.abi.functions import *
-from indexer.modules.custom.lido.domains.seth import LidoPositionValuesD, LidoShareBalanceCurrentD, LidoShareBalanceD
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera.common.utils.web3_utils import ZERO_ADDRESS, event_topic_to_address
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.lido.abi.event import *
+from hemera_udf.lido.abi.functions import *
+from hemera_udf.lido.domains import LidoPositionValuesD, LidoShareBalanceCurrentD, LidoShareBalanceD
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/merchant_moe/domains/__init__.py b/hemera_udf/lido/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/merchant_moe/domains/__init__.py
rename to hemera_udf/lido/models/__init__.py
diff --git a/indexer/modules/custom/lido/models/seth.py b/hemera_udf/lido/models/seth.py
similarity index 93%
rename from indexer/modules/custom/lido/models/seth.py
rename to hemera_udf/lido/models/seth.py
index f4965d250..04e753421 100644
--- a/indexer/modules/custom/lido/models/seth.py
+++ b/hemera_udf/lido/models/seth.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.lido.domains.seth import LidoPositionValuesD, LidoShareBalanceCurrentD, LidoShareBalanceD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.lido.domains import LidoPositionValuesD, LidoShareBalanceCurrentD, LidoShareBalanceD
class LidoShareBalances(HemeraModel):
diff --git a/indexer/modules/custom/lido/sql/lido_sql_migration.sql b/hemera_udf/lido/sql/lido_sql_migration.sql
similarity index 100%
rename from indexer/modules/custom/lido/sql/lido_sql_migration.sql
rename to hemera_udf/lido/sql/lido_sql_migration.sql
diff --git a/indexer/modules/custom/merchant_moe/models/__init__.py b/hemera_udf/meme_agent/__init__.py
similarity index 100%
rename from indexer/modules/custom/merchant_moe/models/__init__.py
rename to hemera_udf/meme_agent/__init__.py
diff --git a/indexer/modules/custom/opensea/__init__.py b/hemera_udf/meme_agent/abi/__init__.py
similarity index 100%
rename from indexer/modules/custom/opensea/__init__.py
rename to hemera_udf/meme_agent/abi/__init__.py
diff --git a/hemera_udf/meme_agent/abi/event.py b/hemera_udf/meme_agent/abi/event.py
new file mode 100644
index 000000000..4ff84b601
--- /dev/null
+++ b/hemera_udf/meme_agent/abi/event.py
@@ -0,0 +1,97 @@
+from hemera.common.utils.abi_code_utils import Event
+
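+# Token-creation event ABIs for the supported meme launch platforms (Clanker v0/v1, Virtuals, Larry).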
+clanker_token_created_event_v1 = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "tokenAddress", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "lpNftId", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "deployer", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "fid", "type": "uint256"},
+ {"indexed": False, "internalType": "string", "name": "name", "type": "string"},
+ {"indexed": False, "internalType": "string", "name": "symbol", "type": "string"},
+ {"indexed": False, "internalType": "uint256", "name": "supply", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "lockerAddress", "type": "address"},
+ {"indexed": False, "internalType": "string", "name": "castHash", "type": "string"},
+ ],
+ "name": "TokenCreated",
+ "type": "event",
+ }
+)
+
+clanker_token_created_event_v0 = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "tokenAddress", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "lpNftId", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "deployer", "type": "address"},
+ {"indexed": False, "internalType": "string", "name": "name", "type": "string"},
+ {"indexed": False, "internalType": "string", "name": "symbol", "type": "string"},
+ {"indexed": False, "internalType": "uint256", "name": "supply", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "_supply", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "lockerAddress", "type": "address"},
+ ],
+ "name": "TokenCreated",
+ "type": "event",
+ }
+)
+
+virtuals_token_created_event_v1 = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "uint256", "name": "virtualId", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "token", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "dao", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "tba", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "veToken", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "lp", "type": "address"},
+ ],
+ "name": "NewPersona",
+ "type": "event",
+ }
+)
+
+virtuals_token_created_event_v0 = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "uint256", "name": "virtualId", "type": "uint256"},
+ {"indexed": False, "internalType": "address", "name": "dao", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "token", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "lp", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "veToken", "type": "address"},
+ ],
+ "name": "AgentMigrated",
+ "type": "event",
+ }
+)
+
+larry_token_created_event = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "token", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "party", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "recipient", "type": "address"},
+ {"indexed": False, "internalType": "string", "name": "name", "type": "string"},
+ {"indexed": False, "internalType": "string", "name": "symbol", "type": "string"},
+ {"indexed": False, "internalType": "uint256", "name": "ethValue", "type": "uint256"},
+ {
+ "components": [
+ {"internalType": "uint256", "name": "totalSupply", "type": "uint256"},
+ {"internalType": "uint256", "name": "numTokensForDistribution", "type": "uint256"},
+ {"internalType": "uint256", "name": "numTokensForRecipient", "type": "uint256"},
+ {"internalType": "uint256", "name": "numTokensForLP", "type": "uint256"},
+ ],
+ "indexed": False,
+ "internalType": "struct ERC20CreatorV3.TokenDistributionConfiguration",
+ "name": "config",
+ "type": "tuple",
+ },
+ ],
+ "name": "ERC20Created",
+ "type": "event",
+ }
+)
diff --git a/hemera_udf/meme_agent/abi/fourmeme_event.py b/hemera_udf/meme_agent/abi/fourmeme_event.py
new file mode 100644
index 000000000..6d31ff9cd
--- /dev/null
+++ b/hemera_udf/meme_agent/abi/fourmeme_event.py
@@ -0,0 +1,55 @@
+from hemera.common.utils.abi_code_utils import Event
+
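+# FourMeme event ABIs (token create / purchase / sale) consumed by ExportFourMemeJob.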
+token_create_event = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "creator", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "token", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "requestId", "type": "uint256"},
+ {"indexed": False, "internalType": "string", "name": "name", "type": "string"},
+ {"indexed": False, "internalType": "string", "name": "symbol", "type": "string"},
+ {"indexed": False, "internalType": "uint256", "name": "totalSupply", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "launchTime", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "launchFee", "type": "uint256"},
+ ],
+ "name": "TokenCreate",
+ "type": "event",
+ }
+)
+
+token_purchase_event = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "token", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "account", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "price", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "cost", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "fee", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "offers", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "funds", "type": "uint256"},
+ ],
+ "name": "TokenPurchase",
+ "type": "event",
+ }
+)
+
+token_sale_event = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "token", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "account", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "price", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "cost", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "fee", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "offers", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "funds", "type": "uint256"},
+ ],
+ "name": "TokenSale",
+ "type": "event",
+ }
+)
diff --git a/indexer/modules/custom/opensea/domain/__init__.py b/hemera_udf/meme_agent/domains/__init__.py
similarity index 100%
rename from indexer/modules/custom/opensea/domain/__init__.py
rename to hemera_udf/meme_agent/domains/__init__.py
diff --git a/hemera_udf/meme_agent/domains/clanker.py b/hemera_udf/meme_agent/domains/clanker.py
new file mode 100644
index 000000000..fe990a3b4
--- /dev/null
+++ b/hemera_udf/meme_agent/domains/clanker.py
@@ -0,0 +1,19 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class ClankerCreatedTokenD(Domain):
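+ """Token created via Clanker's TokenCreated event (v0 or v1)."""
+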
+ token_address: str
+ lp_nft_id: int
+ deployer: str
+ fid: int
+ name: str
+ symbol: str
+ supply: int
+ locker_address: str
+ cast_hash: str
+ block_number: int
+ block_timestamp: int
+ version: int
diff --git a/hemera_udf/meme_agent/domains/fourmeme.py b/hemera_udf/meme_agent/domains/fourmeme.py
new file mode 100644
index 000000000..d1be701ab
--- /dev/null
+++ b/hemera_udf/meme_agent/domains/fourmeme.py
@@ -0,0 +1,41 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class FourMemeTokenCreateD(Domain):
+ """Token creation event from FourMeme"""
+
+ creator: str
+ token: str
+ request_id: int
+ name: str
+ symbol: str
+ total_supply: int
+ launch_time: int
+ launch_fee: int
+ block_number: int
+ block_timestamp: int
+ transaction_hash: str
+
+
+@dataclass
+class FourMemeTokenTradeD(Domain):
+ """Token trading event (buy/sell) from FourMeme"""
+
+ token: str
+ account: str
+ log_index: int
+ price: int
+ price_usd: float
+ amount: int
+ cost: int
+ fee: int
+ offers: int
+ funds: int
+ block_number: int
+ transaction_hash: str
+ block_timestamp: int
+ # Type of trade: 'buy' or 'sell'
+ trade_type: str
diff --git a/hemera_udf/meme_agent/domains/larry.py b/hemera_udf/meme_agent/domains/larry.py
new file mode 100644
index 000000000..c9ed9aaab
--- /dev/null
+++ b/hemera_udf/meme_agent/domains/larry.py
@@ -0,0 +1,15 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class LarryCreatedTokenD(Domain):
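+ """Token created via Larry's ERC20Created event."""
+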
+ token: str
+ party: str
+ recipient: str
+ name: str
+ symbol: str
+ eth_value: int
+ block_number: int
+ block_timestamp: int
diff --git a/hemera_udf/meme_agent/domains/virtuals.py b/hemera_udf/meme_agent/domains/virtuals.py
new file mode 100644
index 000000000..72c21b808
--- /dev/null
+++ b/hemera_udf/meme_agent/domains/virtuals.py
@@ -0,0 +1,15 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class VirtualsCreatedTokenD(Domain):
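+ """Virtuals agent token, from the NewPersona / AgentMigrated events."""
+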
+ virtual_id: int
+ token: str
+ dao: str
+ tba: str
+ ve_token: str
+ lp: str
+ block_number: int
+ block_timestamp: int
diff --git a/hemera_udf/meme_agent/export_four_meme.py b/hemera_udf/meme_agent/export_four_meme.py
new file mode 100644
index 000000000..c981c4216
--- /dev/null
+++ b/hemera_udf/meme_agent/export_four_meme.py
@@ -0,0 +1,151 @@
+import logging
+from typing import List
+
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.meme_agent.abi.fourmeme_event import token_create_event, token_purchase_event, token_sale_event
+from hemera_udf.meme_agent.domains.fourmeme import FourMemeTokenCreateD, FourMemeTokenTradeD
+from hemera_udf.meme_agent.models.fourmeme import FourMemeTokenCreate, FourMemeTokenTrade
+from hemera_udf.token_price.domains import BlockTokenPrice
+
+logger = logging.getLogger(__name__)
+
+
+class ExportFourMemeJob(FilterTransactionDataJob):
+ """Job for exporting FourMeme protocol events"""
+
+ dependency_types = [Log, BlockTokenPrice]
+ output_types = [FourMemeTokenCreateD, FourMemeTokenTradeD]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
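+ # Map each event's topic0 signature to its decoder.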
+ self.events = {
+ token_create_event.get_signature(): token_create_event,
+ token_purchase_event.get_signature(): token_purchase_event,
+ token_sale_event.get_signature(): token_sale_event,
+ }
+
+ def get_filter(self):
+ """Get event filter specification"""
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ addresses=[self.user_defined_config.get("token_manager2_addresses")],
+ topics=[
+ token_create_event.get_signature(),
+ token_purchase_event.get_signature(),
+ token_sale_event.get_signature(),
+ ],
+ )
+ ]
+ )
+
+ def get_wbnb_prices_dict(self):
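+ """Build and cache (on self) a block_number -> WBNB price lookup from upstream BlockTokenPrice records."""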
+ wbnb_prices_dict = {}
+
+ block_token_prices = self._data_buff[BlockTokenPrice.type()]
+ for token_price in block_token_prices:
+ if token_price.token_symbol == "WBNB":
+ block_number = token_price.block_number
+ wbnb_prices_dict[block_number] = token_price.token_price
+
+ self.wbnb_prices_dict = wbnb_prices_dict
+
+ def _collect(self, **kwargs):
+ pass
+
+ def _process(self, **kwargs):
+ """Process log events"""
+ self.get_wbnb_prices_dict()
+
+ logs: List[Log] = self._data_buff.get(Log.type(), [])
+ for log in logs:
+ if log.topic0 == token_create_event.get_signature():
+ self._process_token_create(log)
+ elif log.topic0 == token_purchase_event.get_signature():
+ self._process_token_purchase(log)
+ elif log.topic0 == token_sale_event.get_signature():
+ self._process_token_sale(log)
+
+ def _process_token_create(self, log: Log):
+ """Process token creation event"""
+ log_data = token_create_event.decode_log(log)
+ if not log_data:
+ return
+
+ self._collect_domain(
+ FourMemeTokenCreateD(
+ creator=log_data["creator"],
+ token=log_data["token"],
+ request_id=log_data["requestId"],
+ name=log_data["name"],
+ symbol=log_data["symbol"],
+ total_supply=log_data["totalSupply"],
+ launch_time=log_data["launchTime"],
+ launch_fee=log_data["launchFee"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ transaction_hash=log.transaction_hash,
+ )
+ )
+
+ def _process_token_purchase(self, log: Log):
+ """Process token purchase event"""
+ log_data = token_purchase_event.decode_log(log)
+ if not log_data:
+ return
+
+ wbnb_usd_price = self.wbnb_prices_dict.get(log.block_number, 0)
+
+ price_usd = float(log_data["price"]) * float(wbnb_usd_price) / 10.0**18
+
+ self._collect_domain(
+ FourMemeTokenTradeD(
+ token=log_data["token"],
+ account=log_data["account"],
+ log_index=log.log_index,
+ price=log_data["price"],
+ price_usd=price_usd,
+ amount=log_data["amount"],
+ cost=log_data["cost"],
+ fee=log_data["fee"],
+ offers=log_data["offers"],
+ funds=log_data["funds"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ trade_type="buy",
+ transaction_hash=log.transaction_hash,
+ )
+ )
+
+ def _process_token_sale(self, log: Log):
+ """Process token sale event"""
+ log_data = token_sale_event.decode_log(log)
+ if not log_data:
+ return
+
+ wbnb_usd_price = self.wbnb_prices_dict.get(log.block_number, 0)
+
+ price_usd = float(log_data["price"]) * float(wbnb_usd_price) / 10.0**18
+
+ self._collect_domain(
+ FourMemeTokenTradeD(
+ token=log_data["token"],
+ account=log_data["account"],
+ log_index=log.log_index,
+ price=log_data["price"],
+ price_usd=price_usd,
+ amount=log_data["amount"],
+ cost=log_data["cost"],
+ fee=log_data["fee"],
+ offers=log_data["offers"],
+ funds=log_data["funds"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ trade_type="sell",
+ transaction_hash=log.transaction_hash,
+ )
+ )
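[Reviewer note — not part of the patch] A minimal check of the USD pricing math in `_process_token_purchase`/`_process_token_sale`, assuming `price` is the per-token price denominated in wei of WBNB (18 decimals) and `BlockTokenPrice.token_price` is the WBNB/USD quote; all numbers are illustrative:

    price_wei = 250_000_000_000     # per-token price in WBNB wei (assumed unit)
    wbnb_usd = 600.0                # WBNB/USD quote at the block (assumed unit)
    price_usd = float(price_wei) * wbnb_usd / 10.0**18
    assert abs(price_usd - 1.5e-4) < 1e-15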
diff --git a/hemera_udf/meme_agent/export_meme_token_created.py b/hemera_udf/meme_agent/export_meme_token_created.py
new file mode 100644
index 000000000..696c1ef57
--- /dev/null
+++ b/hemera_udf/meme_agent/export_meme_token_created.py
@@ -0,0 +1,174 @@
+import logging
+from typing import List
+
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs.base_job import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.meme_agent.abi.event import *
+from hemera_udf.meme_agent.domains.clanker import ClankerCreatedTokenD
+from hemera_udf.meme_agent.domains.larry import LarryCreatedTokenD
+from hemera_udf.meme_agent.domains.virtuals import VirtualsCreatedTokenD
+
+logger = logging.getLogger(__name__)
+
+
+class ExportMemeTokenCreatedJob(FilterTransactionDataJob):
+ dependency_types = [Transaction]
+ output_types = [ClankerCreatedTokenD, LarryCreatedTokenD, VirtualsCreatedTokenD]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ self._batch_work_executor = BatchWorkExecutor(
+ kwargs["batch_size"],
+ kwargs["max_workers"],
+ job_name=self.__class__.__name__,
+ )
+ self._is_batch = kwargs["batch_size"] > 1
+ self._filters = kwargs.get("filters", [])
+ self.user_defined_config["larry_factory_address"] = [
+ address.lower() for address in self.user_defined_config["larry_factory_address"]
+ ]
+
+ def get_filter(self):
+ return [
+ TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ addresses=[
+ self.user_defined_config["clanker_factory_address_v0"],
+ self.user_defined_config["clanker_factory_address_v1"],
+ self.user_defined_config["virtuals_factory_address_v0"],
+ self.user_defined_config["virtuals_factory_address_v1"],
+ ]
+ + self.user_defined_config["larry_factory_address"],
+ topics=[
+ clanker_token_created_event_v0.get_signature(),
+ clanker_token_created_event_v1.get_signature(),
+ virtuals_token_created_event_v0.get_signature(),
+ virtuals_token_created_event_v1.get_signature(),
+ larry_token_created_event.get_signature(),
+ ],
+ )
+ ]
+ ),
+ ]
+
+ def _collect(self, **kwargs):
+ pass
+
+ def _process(self, **kwargs):
+ logs: List[Log] = self._data_buff.get(Log.type(), [])
+ for log in logs:
+ log_address = log.address.lower()
+ if log_address == self.user_defined_config["clanker_factory_address_v0"].lower():
+ self._process_clanker_token_created_v0(log)
+ elif log_address == self.user_defined_config["clanker_factory_address_v1"].lower():
+ self._process_clanker_token_created_v1(log)
+ elif log_address == self.user_defined_config["virtuals_factory_address_v0"].lower():
+ self._process_virtuals_token_created_v0(log)
+ elif log_address == self.user_defined_config["virtuals_factory_address_v1"].lower():
+ self._process_virtuals_token_created_v1(log)
+ elif log_address in self.user_defined_config["larry_factory_address"]:
+ self._process_larry_token_created(log)
+
+ def _process_clanker_token_created_v0(self, log: Log):
+ if log.topic0 != clanker_token_created_event_v0.get_signature():
+ return
+
+ log_data = clanker_token_created_event_v0.decode_log(log)
+ self._collect_domain(
+ ClankerCreatedTokenD(
+ token_address=log_data["tokenAddress"],
+ lp_nft_id=log_data["lpNftId"],
+ deployer=log_data["deployer"],
+ fid=0,
+ name=log_data["name"],
+ symbol=log_data["symbol"],
+ supply=log_data["supply"],
+ locker_address=log_data["lockerAddress"],
+ cast_hash="",
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ version=0,
+ )
+ )
+
+ def _process_clanker_token_created_v1(self, log: Log):
+ if log.topic0 != clanker_token_created_event_v1.get_signature():
+ return
+
+ log_data = clanker_token_created_event_v1.decode_log(log)
+ self._collect_domain(
+ ClankerCreatedTokenD(
+ token_address=log_data["tokenAddress"],
+ lp_nft_id=log_data["lpNftId"],
+ deployer=log_data["deployer"],
+ fid=log_data["fid"],
+ name=log_data["name"],
+ symbol=log_data["symbol"],
+ supply=log_data["supply"],
+ locker_address=log_data["lockerAddress"],
+ cast_hash=log_data["castHash"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ version=1,
+ )
+ )
+
+ def _process_virtuals_token_created_v0(self, log: Log):
+ if log.topic0 != virtuals_token_created_event_v0.get_signature():
+ return
+
+ log_data = virtuals_token_created_event_v0.decode_log(log)
+ self._collect_domain(
+ VirtualsCreatedTokenD(
+ virtual_id=log_data["virtualId"],
+ token=log_data["token"],
+ dao=log_data["dao"],
+ tba="",
+ ve_token=log_data["veToken"],
+ lp=log_data["lp"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ )
+ )
+
+ def _process_virtuals_token_created_v1(self, log: Log):
+ if log.topic0 != virtuals_token_created_event_v1.get_signature():
+ return
+
+ log_data = virtuals_token_created_event_v1.decode_log(log)
+ self._collect_domain(
+ VirtualsCreatedTokenD(
+ virtual_id=log_data["virtualId"],
+ token=log_data["token"],
+ dao=log_data["dao"],
+ tba=log_data["tba"],
+ ve_token=log_data["veToken"],
+ lp=log_data["lp"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ )
+ )
+
+ def _process_larry_token_created(self, log: Log):
+ if log.topic0 != larry_token_created_event.get_signature():
+ return
+
+ log_data = larry_token_created_event.decode_log(log)
+ self._collect_domain(
+ LarryCreatedTokenD(
+ token=log_data["token"],
+ party=log_data["party"],
+ recipient=log_data["recipient"],
+ name=log_data["name"],
+ symbol=log_data["symbol"],
+ eth_value=log_data["ethValue"],
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ )
+ )
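[Reviewer note — not part of the patch] A sketch of the `user_defined_config` shape this job reads in `__init__` and `get_filter`; the key names come from the code above, the addresses are placeholders:

    user_defined_config = {
        "clanker_factory_address_v0": "0x...",
        "clanker_factory_address_v1": "0x...",
        "virtuals_factory_address_v0": "0x...",
        "virtuals_factory_address_v1": "0x...",
        # list-valued: the job lowercases these and matches log addresses
        "larry_factory_address": ["0x...", "0x..."],
    }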
diff --git a/indexer/modules/custom/opensea/parser/__init__.py b/hemera_udf/meme_agent/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/opensea/parser/__init__.py
rename to hemera_udf/meme_agent/models/__init__.py
diff --git a/hemera_udf/meme_agent/models/clanker.py b/hemera_udf/meme_agent/models/clanker.py
new file mode 100644
index 000000000..cacd0c62f
--- /dev/null
+++ b/hemera_udf/meme_agent/models/clanker.py
@@ -0,0 +1,36 @@
+from sqlalchemy import Column, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.meme_agent.domains.clanker import ClankerCreatedTokenD
+
+
+class ClankerCreatedToken(HemeraModel):
+ __tablename__ = "af_clanker_created_token"
+
+ token_address = Column(BYTEA, primary_key=True)
+ lp_nft_id = Column(BIGINT)
+ deployer = Column(BYTEA)
+ fid = Column(BIGINT)
+ name = Column(VARCHAR)
+ symbol = Column(VARCHAR)
+ supply = Column(NUMERIC(100))
+ locker_address = Column(BYTEA)
+ cast_hash = Column(BYTEA)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+ version = Column(INTEGER)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now(), onupdate=func.now())
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": ClankerCreatedTokenD,
+ "conflict_do_update": None,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/meme_agent/models/fourmeme.py b/hemera_udf/meme_agent/models/fourmeme.py
new file mode 100644
index 000000000..f20dfb55d
--- /dev/null
+++ b/hemera_udf/meme_agent/models/fourmeme.py
@@ -0,0 +1,73 @@
+from sqlalchemy import Column, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.meme_agent.domains.fourmeme import FourMemeTokenCreateD, FourMemeTokenTradeD
+
+
+class FourMemeTokenCreate(HemeraModel):
+ """Database model for FourMeme token creation events"""
+
+ __tablename__ = "af_fourmeme_token_create"
+
+ token = Column(BYTEA, primary_key=True)
+ creator = Column(BYTEA)
+ request_id = Column(BIGINT)
+ name = Column(VARCHAR)
+ symbol = Column(VARCHAR)
+ total_supply = Column(NUMERIC(100))
+ launch_time = Column(BIGINT)
+ launch_fee = Column(NUMERIC(100))
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+ transaction_hash = Column(BYTEA)
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now(), onupdate=func.now())
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": FourMemeTokenCreateD,
+ "conflict_do_update": None,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
+
+
+class FourMemeTokenTrade(HemeraModel):
+ """Database model for FourMeme token trading events (buy/sell)"""
+
+ __tablename__ = "af_fourmeme_token_trade"
+
+ # Composite primary key: token + account + block_number + log_index + trade_type
+ token = Column(BYTEA, primary_key=True)
+ account = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT, primary_key=True)
+ log_index = Column(INTEGER, primary_key=True)
+ trade_type = Column(VARCHAR, primary_key=True) # 'buy' or 'sell'
+
+ price = Column(NUMERIC(100))
+ price_usd = Column(NUMERIC)
+ amount = Column(NUMERIC(100))
+ cost = Column(NUMERIC(100))
+ fee = Column(NUMERIC(100))
+ offers = Column(NUMERIC(100))
+ funds = Column(NUMERIC(100))
+ block_timestamp = Column(TIMESTAMP)
+ transaction_hash = Column(BYTEA)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now(), onupdate=func.now())
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": FourMemeTokenTradeD,
+ "conflict_do_update": None,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/meme_agent/models/larry.py b/hemera_udf/meme_agent/models/larry.py
new file mode 100644
index 000000000..5cc30b388
--- /dev/null
+++ b/hemera_udf/meme_agent/models/larry.py
@@ -0,0 +1,32 @@
+from sqlalchemy import Column, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.meme_agent.domains.larry import LarryCreatedTokenD
+
+
+class LarryCreatedToken(HemeraModel):
+ __tablename__ = "af_larry_created_token"
+
+ token = Column(BYTEA, primary_key=True)
+ party = Column(BYTEA)
+ recipient = Column(BYTEA)
+ name = Column(VARCHAR)
+ symbol = Column(VARCHAR)
+ eth_value = Column(NUMERIC(100))
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now(), onupdate=func.now())
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": LarryCreatedTokenD,
+ "conflict_do_update": None,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/meme_agent/models/virtuals.py b/hemera_udf/meme_agent/models/virtuals.py
new file mode 100644
index 000000000..a032fc2bb
--- /dev/null
+++ b/hemera_udf/meme_agent/models/virtuals.py
@@ -0,0 +1,32 @@
+from sqlalchemy import Column, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.meme_agent.domains.virtuals import VirtualsCreatedTokenD
+
+
+class VirtualsCreatedToken(HemeraModel):
+ __tablename__ = "af_virtuals_created_token"
+
+ virtual_id = Column(BIGINT, primary_key=True)
+ token = Column(BYTEA)
+ dao = Column(BYTEA)
+ tba = Column(BYTEA)
+ ve_token = Column(BYTEA)
+ lp = Column(BYTEA)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now(), onupdate=func.now())
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": VirtualsCreatedTokenD,
+ "conflict_do_update": None,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/meme_agent/sql/four_meme.sql b/hemera_udf/meme_agent/sql/four_meme.sql
new file mode 100644
index 000000000..5ace634da
--- /dev/null
+++ b/hemera_udf/meme_agent/sql/four_meme.sql
@@ -0,0 +1,36 @@
+CREATE TABLE "public"."af_fourmeme_token_trade" (
+ "token" bytea NOT NULL,
+ "account" bytea NOT NULL,
+ "block_number" int8 NOT NULL,
+ "log_index" int4 NOT NULL,
+ "trade_type" varchar NOT NULL,
+ "price" numeric,
+ "price_usd" numeric,
+ "amount" numeric,
+ "cost" numeric,
+ "fee" numeric,
+ "offers" numeric,
+ "funds" numeric,
+ "block_timestamp" timestamp,
+ "create_time" timestamp DEFAULT now(),
+ "update_time" timestamp DEFAULT now(),
+ "transaction_hash" bytea,
+ PRIMARY KEY ("token","account","block_number","log_index","trade_type")
+);
+
+CREATE TABLE "public"."af_fourmeme_token_create" (
+ "token" bytea NOT NULL,
+ "creator" bytea,
+ "request_id" int8,
+ "name" varchar,
+ "symbol" varchar,
+ "total_supply" numeric,
+ "launch_time" int8,
+ "launch_fee" numeric,
+ "transaction_hash" bytea,
+ "block_number" int8,
+ "block_timestamp" timestamp,
+ "create_time" timestamp DEFAULT now(),
+ "update_time" timestamp DEFAULT now(),
+ PRIMARY KEY ("token")
+);
\ No newline at end of file
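[Reviewer note — not part of the patch] A quick post-backfill sanity check against the new trade table, sketched with the SQLAlchemy model added in this patch; the engine DSN is a placeholder:

    from sqlalchemy import create_engine, func
    from sqlalchemy.orm import Session

    from hemera_udf.meme_agent.models.fourmeme import FourMemeTokenTrade

    engine = create_engine("postgresql://user:pass@localhost/hemera")  # placeholder DSN
    with Session(engine) as session:
        # Trade counts per token and side; non-zero rows mean the job wrote data.
        rows = (
            session.query(FourMemeTokenTrade.token, FourMemeTokenTrade.trade_type, func.count())
            .group_by(FourMemeTokenTrade.token, FourMemeTokenTrade.trade_type)
            .all()
        )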
diff --git a/hemera_udf/meme_agent/sql/token_agent_table_migration.sql b/hemera_udf/meme_agent/sql/token_agent_table_migration.sql
new file mode 100644
index 000000000..9648a2ce6
--- /dev/null
+++ b/hemera_udf/meme_agent/sql/token_agent_table_migration.sql
@@ -0,0 +1,49 @@
+BEGIN;
+
+CREATE TABLE IF NOT EXISTS af_clanker_created_token (
+ "token_address" bytea NOT NULL,
+ "lp_nft_id" int8,
+ "deployer" bytea,
+ "fid" int8,
+ "name" varchar,
+ "symbol" varchar,
+ "supply" numeric,
+ "locker_address" bytea,
+ "cast_hash" bytea,
+ "block_number" int8,
+ "version" int8,
+ "create_time" timestamp DEFAULT CURRENT_TIMESTAMP,
+ "update_time" timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY ("token_address")
+);
+
+CREATE TABLE IF NOT EXISTS af_virtuals_created_token (
+ "virtual_id" int8 NOT NULL,
+ "token" bytea,
+ "dao" bytea,
+ "tba" bytea,
+ "ve_token" bytea,
+ "lp" bytea,
+ "block_number" int8,
+ "block_timestamp" timestamp,
+ "create_time" timestamp DEFAULT CURRENT_TIMESTAMP,
+ "update_time" timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY ("virtual_id")
+);
+
+CREATE TABLE IF NOT EXISTS af_larry_created_token (
+ "token" bytea NOT NULL,
+ "party" bytea,
+ "recipient" bytea,
+ "name" varchar,
+ "symbol" varchar,
+ "eth_value" numeric,
+ "block_number" int8,
+ "block_timestamp" timestamp,
+ "create_time" timestamp DEFAULT CURRENT_TIMESTAMP,
+ "update_time" timestamp DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY ("token")
+);
+
+COMMIT;
\ No newline at end of file
diff --git a/hemera_udf/merchant_moe/__init__.py b/hemera_udf/merchant_moe/__init__.py
new file mode 100644
index 000000000..f33aec0f3
--- /dev/null
+++ b/hemera_udf/merchant_moe/__init__.py
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.merchant_moe.domains import (
+ MerchantMoeErc1155TokenCurrentHolding,
+ MerchantMoeErc1155TokenCurrentSupply,
+ MerchantMoeErc1155TokenHolding,
+ MerchantMoeErc1155TokenSupply,
+ MerchantMoePool,
+ MerchantMoePoolCurrentStatus,
+ MerchantMoePoolRecord,
+ MerchantMoeTokenBin,
+ MerchantMoeTokenCurrentBin,
+)
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera_udf.merchant_moe:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("MERCHANTMOE")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ MerchantMoeErc1155TokenHolding,
+ MerchantMoeErc1155TokenCurrentHolding,
+ MerchantMoeErc1155TokenSupply,
+ MerchantMoeErc1155TokenCurrentSupply,
+ MerchantMoeTokenBin,
+ MerchantMoeTokenCurrentBin,
+ MerchantMoePool,
+ MerchantMoePoolCurrentStatus,
+ MerchantMoePoolRecord,
+ },
+)
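[Reviewer note — not part of the patch] The nested `packaging.version.parse` call is what lets pre-release builds pass the gate: `base_version` strips dev/rc suffixes before the comparison. Illustration:

    import packaging.version

    v = packaging.version.parse("1.1.0.dev1")
    # Dev releases sort below their final release...
    assert v < packaging.version.parse("1.1.0")
    # ...but the base_version comparison used above still accepts them.
    assert packaging.version.parse(v.base_version) >= packaging.version.parse("1.0.0")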
diff --git a/indexer/modules/custom/merchant_moe/abi.py b/hemera_udf/merchant_moe/abi.py
similarity index 98%
rename from indexer/modules/custom/merchant_moe/abi.py
rename to hemera_udf/merchant_moe/abi.py
index deb9c13dd..da52e8bd0 100644
--- a/indexer/modules/custom/merchant_moe/abi.py
+++ b/hemera_udf/merchant_moe/abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
TOTAL_SUPPLY_FUNCTION = Function(
{
diff --git a/indexer/modules/custom/merchant_moe/constants.py b/hemera_udf/merchant_moe/constants.py
similarity index 100%
rename from indexer/modules/custom/merchant_moe/constants.py
rename to hemera_udf/merchant_moe/constants.py
diff --git a/hemera_udf/merchant_moe/domains.py b/hemera_udf/merchant_moe/domains.py
new file mode 100644
index 000000000..832c45cae
--- /dev/null
+++ b/hemera_udf/merchant_moe/domains.py
@@ -0,0 +1,88 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class MerchantMoeErc1155TokenHolding(Domain):
+ position_token_address: str
+ wallet_address: str
+ token_id: int
+ balance: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoeErc1155TokenCurrentHolding(Domain):
+ position_token_address: str
+ wallet_address: str
+ token_id: int
+ balance: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoeErc1155TokenSupply(Domain):
+ position_token_address: str
+ token_id: int
+ total_supply: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoeErc1155TokenCurrentSupply(Domain):
+ position_token_address: str
+ token_id: int
+ total_supply: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoeTokenBin(Domain):
+ position_token_address: str
+ token_id: int
+ reserve0_bin: int
+ reserve1_bin: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoeTokenCurrentBin(Domain):
+ position_token_address: str
+ token_id: int
+ reserve0_bin: int
+ reserve1_bin: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoePool(Domain):
+ position_token_address: str
+ token0_address: str
+ token1_address: str
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoePoolRecord(Domain):
+ pool_address: str
+ active_id: int
+ bin_step: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class MerchantMoePoolCurrentStatus(Domain):
+ pool_address: str
+ active_id: int
+ bin_step: int
+ block_number: int
+ block_timestamp: int
diff --git a/indexer/modules/custom/merchant_moe/endpoints/__init__.py b/hemera_udf/merchant_moe/endpoints/__init__.py
similarity index 100%
rename from indexer/modules/custom/merchant_moe/endpoints/__init__.py
rename to hemera_udf/merchant_moe/endpoints/__init__.py
diff --git a/indexer/modules/custom/merchant_moe/endpoints/routes.py b/hemera_udf/merchant_moe/endpoints/routes.py
similarity index 87%
rename from indexer/modules/custom/merchant_moe/endpoints/routes.py
rename to hemera_udf/merchant_moe/endpoints/routes.py
index deaaa1bdc..c2fd8e923 100644
--- a/indexer/modules/custom/merchant_moe/endpoints/routes.py
+++ b/hemera_udf/merchant_moe/endpoints/routes.py
@@ -1,15 +1,13 @@
from flask_restx import Resource
-from common.models import db
-from common.models.current_token_balances import CurrentTokenBalances
-from common.models.tokens import Tokens
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.merchant_moe.endpoints import merchant_moe_namespace
-from indexer.modules.custom.merchant_moe.models.feature_erc1155_token_current_supply import (
- FeatureErc1155TokenCurrentSupplyStatus,
-)
-from indexer.modules.custom.merchant_moe.models.feature_merchant_moe_pool import FeatureMerchantMoePools
-from indexer.modules.custom.merchant_moe.models.feature_merchant_moe_token_current_bin import (
+from hemera.common.models import db
+from hemera.common.models.current_token_balances import CurrentTokenBalances
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.merchant_moe.endpoints import merchant_moe_namespace
+from hemera_udf.merchant_moe.models.feature_erc1155_token_current_supply import FeatureErc1155TokenCurrentSupplyStatus
+from hemera_udf.merchant_moe.models.feature_merchant_moe_pool import FeatureMerchantMoePools
+from hemera_udf.merchant_moe.models.feature_merchant_moe_token_current_bin import (
FeatureMerchantMoeTokenBinCurrentStatus,
)
diff --git a/indexer/modules/custom/pendle/__init__.py b/hemera_udf/merchant_moe/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/pendle/__init__.py
rename to hemera_udf/merchant_moe/jobs/__init__.py
diff --git a/indexer/modules/custom/merchant_moe/export_merchant_moe_1155_token_holding_detail_job.py b/hemera_udf/merchant_moe/jobs/export_merchant_moe_1155_token_holding_detail_job.py
similarity index 93%
rename from indexer/modules/custom/merchant_moe/export_merchant_moe_1155_token_holding_detail_job.py
rename to hemera_udf/merchant_moe/jobs/export_merchant_moe_1155_token_holding_detail_job.py
index 4e5816953..d3994898d 100644
--- a/indexer/modules/custom/merchant_moe/export_merchant_moe_1155_token_holding_detail_job.py
+++ b/hemera_udf/merchant_moe/jobs/export_merchant_moe_1155_token_holding_detail_job.py
@@ -1,10 +1,14 @@
import logging
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.log import Log
-from indexer.domain.token_balance import TokenBalance
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.merchant_moe.abi import (
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.merchant_moe.abi import (
DEPOSITED_TO_BINS_EVENT,
GET_ACTIVE_ID_FUNCTION,
GET_BIN_FUNCTION,
@@ -15,24 +19,18 @@
TRANSFER_BATCH_EVNET,
WITHDRAWN_FROM_BINS_EVENT,
)
-from indexer.modules.custom.merchant_moe.domains.erc1155_token_holding import (
+from hemera_udf.merchant_moe.domains import (
MerchantMoeErc1155TokenCurrentHolding,
MerchantMoeErc1155TokenCurrentSupply,
MerchantMoeErc1155TokenHolding,
MerchantMoeErc1155TokenSupply,
-)
-from indexer.modules.custom.merchant_moe.domains.merchant_moe import (
MerchantMoePool,
MerchantMoePoolCurrentStatus,
MerchantMoePoolRecord,
MerchantMoeTokenBin,
MerchantMoeTokenCurrentBin,
)
-from indexer.modules.custom.merchant_moe.models.feature_merchant_moe_pool import FeatureMerchantMoePools
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.collection_utils import distinct_collections_by_group
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.merchant_moe.models.feature_merchant_moe_pool import FeatureMerchantMoePools
logger = logging.getLogger(__name__)
@@ -301,4 +299,4 @@ def get_exist_pools(db_service):
raise e
finally:
session.close()
- return history_pools
+ return list(history_pools)
diff --git a/indexer/modules/custom/pendle/domains/__init__.py b/hemera_udf/merchant_moe/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/pendle/domains/__init__.py
rename to hemera_udf/merchant_moe/models/__init__.py
diff --git a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_current_holdings.py b/hemera_udf/merchant_moe/models/feature_erc1155_token_current_holdings.py
similarity index 83%
rename from indexer/modules/custom/merchant_moe/models/feature_erc1155_token_current_holdings.py
rename to hemera_udf/merchant_moe/models/feature_erc1155_token_current_holdings.py
index 0c5bb3c16..17b49cf82 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_current_holdings.py
+++ b/hemera_udf/merchant_moe/models/feature_erc1155_token_current_holdings.py
@@ -1,10 +1,8 @@
-from datetime import datetime
-
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
-from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.erc1155_token_holding import MerchantMoeErc1155TokenCurrentHolding
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoeErc1155TokenCurrentHolding
class FeatureErc1155TokenCurrentHoldings(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_current_supply.py b/hemera_udf/merchant_moe/models/feature_erc1155_token_current_supply.py
similarity index 86%
rename from indexer/modules/custom/merchant_moe/models/feature_erc1155_token_current_supply.py
rename to hemera_udf/merchant_moe/models/feature_erc1155_token_current_supply.py
index 1c5e07da5..2b81de76e 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_current_supply.py
+++ b/hemera_udf/merchant_moe/models/feature_erc1155_token_current_supply.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.erc1155_token_holding import MerchantMoeErc1155TokenCurrentSupply
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoeErc1155TokenCurrentSupply
class FeatureErc1155TokenCurrentSupplyStatus(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_holding.py b/hemera_udf/merchant_moe/models/feature_erc1155_token_holding.py
similarity index 89%
rename from indexer/modules/custom/merchant_moe/models/feature_erc1155_token_holding.py
rename to hemera_udf/merchant_moe/models/feature_erc1155_token_holding.py
index b6e70841c..e76f83a61 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_holding.py
+++ b/hemera_udf/merchant_moe/models/feature_erc1155_token_holding.py
@@ -1,10 +1,8 @@
-from datetime import datetime
-
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.erc1155_token_holding import MerchantMoeErc1155TokenHolding
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoeErc1155TokenHolding
class FeatureErc1155TokenHoldings(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_supply.py b/hemera_udf/merchant_moe/models/feature_erc1155_token_supply.py
similarity index 89%
rename from indexer/modules/custom/merchant_moe/models/feature_erc1155_token_supply.py
rename to hemera_udf/merchant_moe/models/feature_erc1155_token_supply.py
index 4edac3676..553c23635 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_erc1155_token_supply.py
+++ b/hemera_udf/merchant_moe/models/feature_erc1155_token_supply.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.erc1155_token_holding import MerchantMoeErc1155TokenSupply
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoeErc1155TokenSupply
class FeatureErc1155TokenSupplyRecords(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool.py b/hemera_udf/merchant_moe/models/feature_merchant_moe_pool.py
similarity index 87%
rename from indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool.py
rename to hemera_udf/merchant_moe/models/feature_merchant_moe_pool.py
index b55feab1b..e3f678ee1 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool.py
+++ b/hemera_udf/merchant_moe/models/feature_merchant_moe_pool.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.merchant_moe import MerchantMoePool
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoePool
class FeatureMerchantMoePools(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool_record_status.py b/hemera_udf/merchant_moe/models/feature_merchant_moe_pool_record_status.py
similarity index 86%
rename from indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool_record_status.py
rename to hemera_udf/merchant_moe/models/feature_merchant_moe_pool_record_status.py
index a771c5e57..63e5af7c6 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool_record_status.py
+++ b/hemera_udf/merchant_moe/models/feature_merchant_moe_pool_record_status.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.merchant_moe import MerchantMoePoolCurrentStatus
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoePoolCurrentStatus
class FeatureMerchantMoePoolRecordStatus(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool_records.py b/hemera_udf/merchant_moe/models/feature_merchant_moe_pool_records.py
similarity index 87%
rename from indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool_records.py
rename to hemera_udf/merchant_moe/models/feature_merchant_moe_pool_records.py
index 08346f821..73e78fe3b 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_pool_records.py
+++ b/hemera_udf/merchant_moe/models/feature_merchant_moe_pool_records.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.merchant_moe import MerchantMoePoolRecord
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoePoolRecord
class FeatureMerchantMoePoolRecords(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_token_bin.py b/hemera_udf/merchant_moe/models/feature_merchant_moe_token_bin.py
similarity index 90%
rename from indexer/modules/custom/merchant_moe/models/feature_merchant_moe_token_bin.py
rename to hemera_udf/merchant_moe/models/feature_merchant_moe_token_bin.py
index 356af52f4..0a2361813 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_token_bin.py
+++ b/hemera_udf/merchant_moe/models/feature_merchant_moe_token_bin.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.merchant_moe import MerchantMoeTokenBin
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoeTokenBin
class FeatureMerchantMoeTokenBinRecords(HemeraModel):
diff --git a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_token_current_bin.py b/hemera_udf/merchant_moe/models/feature_merchant_moe_token_current_bin.py
similarity index 89%
rename from indexer/modules/custom/merchant_moe/models/feature_merchant_moe_token_current_bin.py
rename to hemera_udf/merchant_moe/models/feature_merchant_moe_token_current_bin.py
index 48a831b8a..efd872b5d 100644
--- a/indexer/modules/custom/merchant_moe/models/feature_merchant_moe_token_current_bin.py
+++ b/hemera_udf/merchant_moe/models/feature_merchant_moe_token_current_bin.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, asc, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.merchant_moe.domains.merchant_moe import MerchantMoeTokenCurrentBin
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.merchant_moe.domains import MerchantMoeTokenCurrentBin
class FeatureMerchantMoeTokenBinCurrentStatus(HemeraModel):
diff --git a/hemera_udf/opensea/__init__.py b/hemera_udf/opensea/__init__.py
new file mode 100644
index 000000000..da4c49bee
--- /dev/null
+++ b/hemera_udf/opensea/__init__.py
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.opensea.domains import AddressOpenseaTransaction, OpenseaOrder
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera_udf.opensea:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("OPEN_SEA")
+DynamicEntityTypeRegistry.register_output_types(value, {AddressOpenseaTransaction, OpenseaOrder})
diff --git a/indexer/modules/custom/opensea/domain/address_opensea_transactions.py b/hemera_udf/opensea/domains.py
similarity index 54%
rename from indexer/modules/custom/opensea/domain/address_opensea_transactions.py
rename to hemera_udf/opensea/domains.py
index ba37ab995..e56a9438d 100644
--- a/indexer/modules/custom/opensea/domain/address_opensea_transactions.py
+++ b/hemera_udf/opensea/domains.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from indexer.domain import Domain
+from hemera.indexer.domains import Domain
@dataclass
@@ -24,3 +24,20 @@ class AddressOpenseaTransaction(Domain):
block_hash: str
protocol_version: str = "1.6"
+
+
+@dataclass
+class OpenseaOrder(Domain):
+ order_hash: str
+ zone: str
+ offerer: str
+ recipient: str
+ offer: dict
+ consideration: dict
+ block_timestamp: int
+ block_hash: str
+ transaction_hash: str
+ log_index: int
+ block_number: int
+
+ protocol_version: str = "1.6"
diff --git a/indexer/modules/custom/opensea/endpoint/__init__.py b/hemera_udf/opensea/endpoint/__init__.py
similarity index 100%
rename from indexer/modules/custom/opensea/endpoint/__init__.py
rename to hemera_udf/opensea/endpoint/__init__.py
diff --git a/indexer/modules/custom/opensea/endpoint/routes.py b/hemera_udf/opensea/endpoint/routes.py
similarity index 93%
rename from indexer/modules/custom/opensea/endpoint/routes.py
rename to hemera_udf/opensea/endpoint/routes.py
index b36c50c99..0de299eba 100644
--- a/indexer/modules/custom/opensea/endpoint/routes.py
+++ b/hemera_udf/opensea/endpoint/routes.py
@@ -6,23 +6,23 @@
from flask_restx import Resource
from sqlalchemy import and_, desc, func
-from api.app.address.features import register_feature
-from api.app.cache import cache
-from common.models import db
-from common.models.token_hourly_price import TokenHourlyPrices
-from common.models.tokens import Tokens
-from common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, hex_str_to_bytes
-from indexer.modules.custom.opensea.endpoint import opensea_namespace
-from indexer.modules.custom.opensea.models.address_opensea_profile import AddressOpenseaProfile
-from indexer.modules.custom.opensea.models.address_opensea_transaction import AddressOpenseaTransactions
-from indexer.modules.custom.opensea.models.opensea_crypto_mapping import OpenseaCryptoTokenMapping
-from indexer.modules.custom.opensea.models.opensea_order import OpenseaOrders
-from indexer.modules.custom.opensea.models.scheduled_metadata import ScheduledMetadata
-from indexer.modules.custom.opensea.opensea_job import (
+from hemera.api.app.address.features import register_feature
+from hemera.api.app.cache import cache
+from hemera.common.models import db
+from hemera.common.models.token_hourly_price import TokenHourlyPrices
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, hex_str_to_bytes
+from hemera_udf.opensea.endpoint import opensea_namespace
+from hemera_udf.opensea.jobs.opensea_job import (
OpenseaTransactionType,
get_item_type_string,
get_opensea_transaction_type_string,
)
+from hemera_udf.opensea.models.address_opensea_profile import AddressOpenseaProfile
+from hemera_udf.opensea.models.address_opensea_transaction import AddressOpenseaTransactions
+from hemera_udf.opensea.models.opensea_crypto_mapping import OpenseaCryptoTokenMapping
+from hemera_udf.opensea.models.opensea_order import OpenseaOrders
+from hemera_udf.opensea.models.scheduled_metadata import ScheduledMetadata
PAGE_SIZE = 10
diff --git a/indexer/modules/custom/pendle/models/__init__.py b/hemera_udf/opensea/jobs/__init__.py
similarity index 100%
rename from indexer/modules/custom/pendle/models/__init__.py
rename to hemera_udf/opensea/jobs/__init__.py
diff --git a/indexer/modules/custom/opensea/opensea_job.py b/hemera_udf/opensea/jobs/opensea_job.py
similarity index 93%
rename from indexer/modules/custom/opensea/opensea_job.py
rename to hemera_udf/opensea/jobs/opensea_job.py
index c1597b82a..a6f216120 100644
--- a/indexer/modules/custom/opensea/opensea_job.py
+++ b/hemera_udf/opensea/jobs/opensea_job.py
@@ -2,17 +2,16 @@
from enum import Enum
from typing import List
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.opensea.domain.address_opensea_transactions import AddressOpenseaTransaction
-from indexer.modules.custom.opensea.domain.opensea_order import OpenseaOrder
-from indexer.modules.custom.opensea.parser.opensea_contract_parser import (
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.opensea.domains import AddressOpenseaTransaction, OpenseaOrder
+from hemera_udf.opensea.parser.opensea_contract_parser import (
OPENSEA_EVENT_ABI_SIGNATURE_MAPPING,
OpenseaLog,
parse_opensea_transaction_order_fulfilled_event,
)
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/project_contracts/__init__.py b/hemera_udf/opensea/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/project_contracts/__init__.py
rename to hemera_udf/opensea/models/__init__.py
diff --git a/indexer/modules/custom/opensea/models/address_opensea_profile.py b/hemera_udf/opensea/models/address_opensea_profile.py
similarity index 96%
rename from indexer/modules/custom/opensea/models/address_opensea_profile.py
rename to hemera_udf/opensea/models/address_opensea_profile.py
index 998f1052b..6005de0ab 100644
--- a/indexer/modules/custom/opensea/models/address_opensea_profile.py
+++ b/hemera_udf/opensea/models/address_opensea_profile.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, Computed, func, text
from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, JSONB, NUMERIC, TIMESTAMP
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class AddressOpenseaProfile(HemeraModel):
diff --git a/indexer/modules/custom/opensea/models/address_opensea_transaction.py b/hemera_udf/opensea/models/address_opensea_transaction.py
similarity index 92%
rename from indexer/modules/custom/opensea/models/address_opensea_transaction.py
rename to hemera_udf/opensea/models/address_opensea_transaction.py
index 7f2d155ab..c3b4f3cba 100644
--- a/indexer/modules/custom/opensea/models/address_opensea_transaction.py
+++ b/hemera_udf/opensea/models/address_opensea_transaction.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, JSONB, SMALLINT, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.opensea.domain.address_opensea_transactions import AddressOpenseaTransaction
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.opensea.domains import AddressOpenseaTransaction
class AddressOpenseaTransactions(HemeraModel):
diff --git a/indexer/modules/custom/opensea/models/daily_address_opensea_stats.py b/hemera_udf/opensea/models/daily_address_opensea_stats.py
similarity index 95%
rename from indexer/modules/custom/opensea/models/daily_address_opensea_stats.py
rename to hemera_udf/opensea/models/daily_address_opensea_stats.py
index a4c7956ab..2cac3476d 100644
--- a/indexer/modules/custom/opensea/models/daily_address_opensea_stats.py
+++ b/hemera_udf/opensea/models/daily_address_opensea_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BYTEA, DATE, INTEGER, JSONB, NUMERIC, TIMESTAMP
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class DailyAddressOpenseaTransactions(HemeraModel):
diff --git a/indexer/modules/custom/opensea/models/opensea_crypto_mapping.py b/hemera_udf/opensea/models/opensea_crypto_mapping.py
similarity index 90%
rename from indexer/modules/custom/opensea/models/opensea_crypto_mapping.py
rename to hemera_udf/opensea/models/opensea_crypto_mapping.py
index 830540ead..9f72d406e 100644
--- a/indexer/modules/custom/opensea/models/opensea_crypto_mapping.py
+++ b/hemera_udf/opensea/models/opensea_crypto_mapping.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column, text
from sqlalchemy.dialects.postgresql import INTEGER, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class OpenseaCryptoTokenMapping(HemeraModel):
diff --git a/indexer/modules/custom/opensea/models/opensea_order.py b/hemera_udf/opensea/models/opensea_order.py
similarity index 90%
rename from indexer/modules/custom/opensea/models/opensea_order.py
rename to hemera_udf/opensea/models/opensea_order.py
index 82c69d68e..ab378e5c1 100644
--- a/indexer/modules/custom/opensea/models/opensea_order.py
+++ b/hemera_udf/opensea/models/opensea_order.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, JSON, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.opensea.domain.opensea_order import OpenseaOrder
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.opensea.domains import OpenseaOrder
class OpenseaOrders(HemeraModel):
diff --git a/indexer/modules/custom/opensea/models/scheduled_metadata.py b/hemera_udf/opensea/models/scheduled_metadata.py
similarity index 88%
rename from indexer/modules/custom/opensea/models/scheduled_metadata.py
rename to hemera_udf/opensea/models/scheduled_metadata.py
index 7fb5035c4..ad7154046 100644
--- a/indexer/modules/custom/opensea/models/scheduled_metadata.py
+++ b/hemera_udf/opensea/models/scheduled_metadata.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import INTEGER, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class ScheduledMetadata(HemeraModel):
diff --git a/indexer/modules/custom/project_contracts/domain/__init__.py b/hemera_udf/opensea/parser/__init__.py
similarity index 100%
rename from indexer/modules/custom/project_contracts/domain/__init__.py
rename to hemera_udf/opensea/parser/__init__.py
diff --git a/indexer/modules/custom/opensea/parser/opensea_contract_parser.py b/hemera_udf/opensea/parser/opensea_contract_parser.py
similarity index 93%
rename from indexer/modules/custom/opensea/parser/opensea_contract_parser.py
rename to hemera_udf/opensea/parser/opensea_contract_parser.py
index 12c778643..6b1f2a9a4 100644
--- a/indexer/modules/custom/opensea/parser/opensea_contract_parser.py
+++ b/hemera_udf/opensea/parser/opensea_contract_parser.py
@@ -4,10 +4,10 @@
from web3.types import ABIEvent
-from common.utils.abi_code_utils import decode_log
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.transaction import Transaction
-from indexer.utils.abi import event_log_abi_to_topic
+from hemera.common.utils.abi_code_utils import decode_log
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.utils.abi import event_log_abi_to_topic
OPENSEA_EVENT_ABIS = {
"ORDER_FULFILLED_EVENT": '{"anonymous":false,"inputs":[{"indexed":false,"internalType":"bytes32","name":"orderHash","type":"bytes32"},{"indexed":true,"internalType":"address","name":"offerer","type":"address"},{"indexed":true,"internalType":"address","name":"zone","type":"address"},{"indexed":false,"internalType":"address","name":"recipient","type":"address"},{"components":[{"internalType":"enum ItemType","name":"itemType","type":"uint8"},{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"identifier","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"}],"indexed":false,"internalType":"struct SpentItem[]","name":"offer","type":"tuple[]"},{"components":[{"internalType":"enum ItemType","name":"itemType","type":"uint8"},{"internalType":"address","name":"token","type":"address"},{"internalType":"uint256","name":"identifier","type":"uint256"},{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address payable","name":"recipient","type":"address"}],"indexed":false,"internalType":"struct ReceivedItem[]","name":"consideration","type":"tuple[]"}],"name":"OrderFulfilled","type":"event"}'
diff --git a/hemera_udf/pendle/__init__.py b/hemera_udf/pendle/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/pendle/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera_udf.pendle:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/project_contracts/models/__init__.py b/hemera_udf/pendle/abi/__init__.py
similarity index 100%
rename from indexer/modules/custom/project_contracts/models/__init__.py
rename to hemera_udf/pendle/abi/__init__.py
diff --git a/indexer/modules/custom/pendle/abi/event.py b/hemera_udf/pendle/abi/event.py
similarity index 96%
rename from indexer/modules/custom/pendle/abi/event.py
rename to hemera_udf/pendle/abi/event.py
index c37ce2dc6..40b28ff31 100644
--- a/indexer/modules/custom/pendle/abi/event.py
+++ b/hemera_udf/pendle/abi/event.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event
+from hemera.common.utils.abi_code_utils import Event
create_market_event_v3 = Event(
{
diff --git a/indexer/modules/custom/pendle/abi/function.py b/hemera_udf/pendle/abi/function.py
similarity index 95%
rename from indexer/modules/custom/pendle/abi/function.py
rename to hemera_udf/pendle/abi/function.py
index ac5211fe1..da716ff50 100644
--- a/indexer/modules/custom/pendle/abi/function.py
+++ b/hemera_udf/pendle/abi/function.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Function
+from hemera.common.utils.abi_code_utils import Function
get_sy_by_pt = Function(
{
diff --git a/indexer/modules/custom/staking_fbtc/domain/__init__.py b/hemera_udf/pendle/domains/__init__.py
similarity index 100%
rename from indexer/modules/custom/staking_fbtc/domain/__init__.py
rename to hemera_udf/pendle/domains/__init__.py
diff --git a/indexer/modules/custom/pendle/domains/market.py b/hemera_udf/pendle/domains/market.py
similarity index 78%
rename from indexer/modules/custom/pendle/domains/market.py
rename to hemera_udf/pendle/domains/market.py
index 291e7425d..d7732b111 100644
--- a/indexer/modules/custom/pendle/domains/market.py
+++ b/hemera_udf/pendle/domains/market.py
@@ -1,10 +1,10 @@
from dataclasses import dataclass
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class PendlePoolD(FilterData):
+class PendlePoolD(Domain):
market_address: str
sy_address: str
pt_address: str
@@ -15,7 +15,7 @@ class PendlePoolD(FilterData):
@dataclass
-class PendleUserActiveBalanceD(FilterData):
+class PendleUserActiveBalanceD(Domain):
market_address: str
user_address: str
sy_balance: int
@@ -27,7 +27,7 @@ class PendleUserActiveBalanceD(FilterData):
@dataclass
-class PendleUserActiveBalanceCurrentD(FilterData):
+class PendleUserActiveBalanceCurrentD(Domain):
market_address: str
user_address: str
sy_balance: int
diff --git a/indexer/modules/custom/eigen_layer/__init__.py b/hemera_udf/pendle/jobs/__init__.py
similarity index 78%
rename from indexer/modules/custom/eigen_layer/__init__.py
rename to hemera_udf/pendle/jobs/__init__.py
index e14305a10..d9e13fd36 100644
--- a/indexer/modules/custom/eigen_layer/__init__.py
+++ b/hemera_udf/pendle/jobs/__init__.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# @Time 2024/9/23 17:12
+# @Time 2024/12/10 16:52
# @Author will
# @File __init__.py.py
# @Brief
diff --git a/indexer/modules/custom/pendle/export_pendle_balance_job.py b/hemera_udf/pendle/jobs/export_pendle_balance_job.py
similarity index 86%
rename from indexer/modules/custom/pendle/export_pendle_balance_job.py
rename to hemera_udf/pendle/jobs/export_pendle_balance_job.py
index eded8946c..fcc71e04f 100644
--- a/indexer/modules/custom/pendle/export_pendle_balance_job.py
+++ b/hemera_udf/pendle/jobs/export_pendle_balance_job.py
@@ -2,21 +2,19 @@
from itertools import groupby
from typing import List
-from web3 import Web3
-
-from common.utils.web3_utils import ZERO_ADDRESS, event_topic_to_address
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.pendle.abi.event import redeem_rewards_event
-from indexer.modules.custom.pendle.abi.function import *
-from indexer.modules.custom.pendle.domains.market import PendleUserActiveBalanceCurrentD, PendleUserActiveBalanceD
-from indexer.modules.custom.pendle.models.market import PendlePool
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION, ERC20_TRANSFER_EVENT
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera.common.utils.web3_utils import ZERO_ADDRESS, event_topic_to_address
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION, ERC20_TRANSFER_EVENT
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.pendle.abi.event import redeem_rewards_event
+from hemera_udf.pendle.abi.function import *
+from hemera_udf.pendle.domains.market import PendleUserActiveBalanceCurrentD, PendleUserActiveBalanceD
+from hemera_udf.pendle.models.market import PendlePool
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/pendle/export_pendle_pools_job.py b/hemera_udf/pendle/jobs/export_pendle_pools_job.py
similarity index 85%
rename from indexer/modules/custom/pendle/export_pendle_pools_job.py
rename to hemera_udf/pendle/jobs/export_pendle_pools_job.py
index a7175a54a..19077dadc 100644
--- a/indexer/modules/custom/pendle/export_pendle_pools_job.py
+++ b/hemera_udf/pendle/jobs/export_pendle_pools_job.py
@@ -1,19 +1,17 @@
import logging
from typing import List
-from web3 import Web3
-
-from common.utils.web3_utils import event_topic_to_address
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.pendle.abi.event import *
-from indexer.modules.custom.pendle.abi.function import *
-from indexer.modules.custom.pendle.domains.market import PendlePoolD
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera.common.utils.web3_utils import event_topic_to_address
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.pendle.abi.event import *
+from hemera_udf.pendle.abi.function import *
+from hemera_udf.pendle.domains.market import PendlePoolD
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/staking_fbtc/models/__init__.py b/hemera_udf/pendle/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/staking_fbtc/models/__init__.py
rename to hemera_udf/pendle/models/__init__.py
diff --git a/indexer/modules/custom/pendle/models/market.py b/hemera_udf/pendle/models/market.py
similarity index 92%
rename from indexer/modules/custom/pendle/models/market.py
rename to hemera_udf/pendle/models/market.py
index 3d8fcba3c..fc8b30591 100644
--- a/indexer/modules/custom/pendle/models/market.py
+++ b/hemera_udf/pendle/models/market.py
@@ -1,12 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.pendle.domains.market import (
- PendlePoolD,
- PendleUserActiveBalanceCurrentD,
- PendleUserActiveBalanceD,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.pendle.domains.market import PendlePoolD, PendleUserActiveBalanceCurrentD, PendleUserActiveBalanceD
class PendlePool(HemeraModel):
diff --git a/indexer/modules/custom/pendle/sql/pendle_sql_migration.sql b/hemera_udf/pendle/sql/pendle_sql_migration.sql
similarity index 100%
rename from indexer/modules/custom/pendle/sql/pendle_sql_migration.sql
rename to hemera_udf/pendle/sql/pendle_sql_migration.sql
diff --git a/hemera_udf/project_contracts/__init__.py b/hemera_udf/project_contracts/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/project_contracts/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
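+# Note: comparing against base_version strips pre-release/dev suffixes
+# (packaging.version.parse("1.0.0rc1").base_version == "1.0.0"), so release
+# candidates of Hemera 1.0.0 still pass this check.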
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/project_contracts/domain/project_contract_domain.py b/hemera_udf/project_contracts/domains.py
similarity index 88%
rename from indexer/modules/custom/project_contracts/domain/project_contract_domain.py
rename to hemera_udf/project_contracts/domains.py
index 95f9ba138..c6f0d3837 100644
--- a/indexer/modules/custom/project_contracts/domain/project_contract_domain.py
+++ b/hemera_udf/project_contracts/domains.py
@@ -7,11 +7,11 @@
from dataclasses import dataclass
from typing import Optional
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class ProjectContractD(FilterData):
+class ProjectContractD(Domain):
project_id: Optional[str] = None
chain_id: Optional[int] = None
diff --git a/hemera_udf/project_contracts/jobs/__init__.py b/hemera_udf/project_contracts/jobs/__init__.py
new file mode 100644
index 000000000..0a3833b61
--- /dev/null
+++ b/hemera_udf/project_contracts/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 16:56
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/modules/custom/project_contracts/export_project_contracts_job.py b/hemera_udf/project_contracts/jobs/export_project_contracts_job.py
similarity index 89%
rename from indexer/modules/custom/project_contracts/export_project_contracts_job.py
rename to hemera_udf/project_contracts/jobs/export_project_contracts_job.py
index 95825402a..dda22d5eb 100644
--- a/indexer/modules/custom/project_contracts/export_project_contracts_job.py
+++ b/hemera_udf/project_contracts/jobs/export_project_contracts_job.py
@@ -2,13 +2,13 @@
from collections import defaultdict
from typing import List
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.domain.transaction import Transaction
-from indexer.jobs.base_job import ExtensionJob
-from indexer.modules.custom.project_contracts.domain.project_contract_domain import ProjectContractD
-from indexer.modules.custom.project_contracts.models.project_contract import AfProjectContracts
-from indexer.modules.custom.project_contracts.models.projects import AfProjects
-from indexer.utils.abi import bytes_to_hex_str
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera.indexer.utils.abi import bytes_to_hex_str
+from hemera_udf.project_contracts.domains import ProjectContractD
+from hemera_udf.project_contracts.models.project_contract import AfProjectContracts
+from hemera_udf.project_contracts.models.projects import AfProjects
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/stats/models/__init__.py b/hemera_udf/project_contracts/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/stats/models/__init__.py
rename to hemera_udf/project_contracts/models/__init__.py
diff --git a/indexer/modules/custom/project_contracts/models/project_contract.py b/hemera_udf/project_contracts/models/project_contract.py
similarity index 88%
rename from indexer/modules/custom/project_contracts/models/project_contract.py
rename to hemera_udf/project_contracts/models/project_contract.py
index 2b6811174..165ab55cc 100644
--- a/indexer/modules/custom/project_contracts/models/project_contract.py
+++ b/hemera_udf/project_contracts/models/project_contract.py
@@ -7,8 +7,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.project_contracts.domain.project_contract_domain import ProjectContractD
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.project_contracts.domains import ProjectContractD
class AfProjectContracts(HemeraModel):
diff --git a/indexer/modules/custom/project_contracts/models/projects.py b/hemera_udf/project_contracts/models/projects.py
similarity index 94%
rename from indexer/modules/custom/project_contracts/models/projects.py
rename to hemera_udf/project_contracts/models/projects.py
index 6ee64fd45..8c6572d28 100644
--- a/indexer/modules/custom/project_contracts/models/projects.py
+++ b/hemera_udf/project_contracts/models/projects.py
@@ -7,7 +7,7 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BOOLEAN, BYTEA, INTEGER, JSONB, TIMESTAMP, VARCHAR
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class AfProjects(HemeraModel):
diff --git a/indexer/modules/custom/project_contracts/sql/project_contract_table_migration.sql b/hemera_udf/project_contracts/sql/project_contract_table_migration.sql
similarity index 100%
rename from indexer/modules/custom/project_contracts/sql/project_contract_table_migration.sql
rename to hemera_udf/project_contracts/sql/project_contract_table_migration.sql
diff --git a/hemera_udf/staking_fbtc/__init__.py b/hemera_udf/staking_fbtc/__init__.py
new file mode 100644
index 000000000..20515ec83
--- /dev/null
+++ b/hemera_udf/staking_fbtc/__init__.py
@@ -0,0 +1,16 @@
+"""
+Index not only FBTC, but also cmETH and other tokens.
+"""
+
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/staking_fbtc/config.ini b/hemera_udf/staking_fbtc/config.ini
similarity index 100%
rename from indexer/modules/custom/staking_fbtc/config.ini
rename to hemera_udf/staking_fbtc/config.ini
diff --git a/hemera_udf/staking_fbtc/domains.py b/hemera_udf/staking_fbtc/domains.py
new file mode 100644
index 000000000..008e10e30
--- /dev/null
+++ b/hemera_udf/staking_fbtc/domains.py
@@ -0,0 +1,72 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+# records for all tokens
+@dataclass
+class AfStakedTransferredBalanceHistDomain(Domain):
+ contract_address: str
+ protocol_id: str
+ wallet_address: str
+ token_address: str
+ block_transfer_value: int
+ block_cumulative_value: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class AfStakedTransferredBalanceCurrentDomain(Domain):
+ contract_address: str
+ protocol_id: str
+ wallet_address: str
+ token_address: str
+ block_transfer_value: int
+ block_cumulative_value: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class StakedFBTCDetail(Domain):
+ vault_address: str
+ protocol_id: str
+ wallet_address: str
+ amount: int
+ changed_amount: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class StakedFBTCCurrentStatus(Domain):
+ vault_address: str
+ protocol_id: str
+ wallet_address: str
+ amount: int
+ changed_amount: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class TransferredFBTCDetail(Domain):
+ vault_address: str
+ protocol_id: str
+ wallet_address: str
+ amount: int
+ changed_amount: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class TransferredFBTCCurrentStatus(Domain):
+ vault_address: str
+ protocol_id: str
+ wallet_address: str
+ amount: int
+ changed_amount: int
+ block_number: int
+ block_timestamp: int
diff --git a/indexer/modules/custom/staking_fbtc/endpoints/__init__.py b/hemera_udf/staking_fbtc/endpoints/__init__.py
similarity index 100%
rename from indexer/modules/custom/staking_fbtc/endpoints/__init__.py
rename to hemera_udf/staking_fbtc/endpoints/__init__.py
diff --git a/indexer/modules/custom/staking_fbtc/endpoints/routes.py b/hemera_udf/staking_fbtc/endpoints/routes.py
similarity index 82%
rename from indexer/modules/custom/staking_fbtc/endpoints/routes.py
rename to hemera_udf/staking_fbtc/endpoints/routes.py
index a8f932186..6278de96c 100644
--- a/indexer/modules/custom/staking_fbtc/endpoints/routes.py
+++ b/hemera_udf/staking_fbtc/endpoints/routes.py
@@ -1,11 +1,11 @@
from flask_restx import Resource
from sqlalchemy import func
-from common.models import db
-from common.models.tokens import Tokens
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.staking_fbtc.endpoints import staking_namespace
-from indexer.modules.custom.staking_fbtc.models.feature_staked_fbtc_detail_records import FeatureStakedFBTCDetailRecords
+from hemera.common.models import db
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.staking_fbtc.endpoints import staking_namespace
+from hemera_udf.staking_fbtc.models.feature_staked_fbtc_detail_records import FeatureStakedFBTCDetailRecords
FBTC_ADDRESS = "0xc96de26018a54d51c097160568752c4e3bd6c364"
diff --git a/hemera_udf/staking_fbtc/jobs/__init__.py b/hemera_udf/staking_fbtc/jobs/__init__.py
new file mode 100644
index 000000000..6a400ba4e
--- /dev/null
+++ b/hemera_udf/staking_fbtc/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 16:58
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/modules/custom/staking_fbtc/export_staked_fbtc_detail_job.py b/hemera_udf/staking_fbtc/jobs/export_staked_fbtc_detail_job.py
similarity index 92%
rename from indexer/modules/custom/staking_fbtc/export_staked_fbtc_detail_job.py
rename to hemera_udf/staking_fbtc/jobs/export_staked_fbtc_detail_job.py
index 6579e6ada..82f7e0ca1 100644
--- a/indexer/modules/custom/staking_fbtc/export_staked_fbtc_detail_job.py
+++ b/hemera_udf/staking_fbtc/jobs/export_staked_fbtc_detail_job.py
@@ -5,17 +5,14 @@
from collections import defaultdict
from typing import Any, Dict, List, Tuple
-from common.utils.abi_code_utils import decode_log
-from indexer.domain.log import Log
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom import common_utils
-from indexer.modules.custom.staking_fbtc import utils
-from indexer.modules.custom.staking_fbtc.domain.feature_staked_fbtc_detail import (
- StakedFBTCCurrentStatus,
- StakedFBTCDetail,
-)
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.common.utils.abi_code_utils import decode_log
+from hemera.indexer.domains.log import Log
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.aci_features import common_utils
+from hemera_udf.staking_fbtc import utils
+from hemera_udf.staking_fbtc.domains import StakedFBTCCurrentStatus, StakedFBTCDetail
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/staking_fbtc/export_staked_transferred_balance_job.py b/hemera_udf/staking_fbtc/jobs/export_staked_transferred_balance_job.py
similarity index 92%
rename from indexer/modules/custom/staking_fbtc/export_staked_transferred_balance_job.py
rename to hemera_udf/staking_fbtc/jobs/export_staked_transferred_balance_job.py
index c66191d10..21aa3aadd 100644
--- a/indexer/modules/custom/staking_fbtc/export_staked_transferred_balance_job.py
+++ b/hemera_udf/staking_fbtc/jobs/export_staked_transferred_balance_job.py
@@ -3,16 +3,16 @@
from sqlalchemy import and_, func, or_
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.domain.token_transfer import ERC20TokenTransfer
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.staking_fbtc.domain.af_staked_transferred_balance import (
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera_udf.staking_fbtc.domains import (
AfStakedTransferredBalanceCurrentDomain,
AfStakedTransferredBalanceHistDomain,
)
-from indexer.modules.custom.staking_fbtc.models.af_staked_transferred_balance_hist import AfStakedTransferredBalanceHist
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.collection_utils import distinct_collections_by_group
+from hemera_udf.staking_fbtc.models.af_staked_transferred_balance_hist import AfStakedTransferredBalanceHist
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/staking_fbtc/export_transferred_fbtc_detail_job.py b/hemera_udf/staking_fbtc/jobs/export_transferred_fbtc_detail_job.py
similarity index 88%
rename from indexer/modules/custom/staking_fbtc/export_transferred_fbtc_detail_job.py
rename to hemera_udf/staking_fbtc/jobs/export_transferred_fbtc_detail_job.py
index 0366e2178..59dc726fb 100644
--- a/indexer/modules/custom/staking_fbtc/export_transferred_fbtc_detail_job.py
+++ b/hemera_udf/staking_fbtc/jobs/export_transferred_fbtc_detail_job.py
@@ -1,20 +1,25 @@
-import ast
import configparser
import logging
import os
from collections import defaultdict
+from sqlalchemy import and_, or_
from typing import Dict, List, Tuple
-from indexer.domain.token_transfer import ERC20TokenTransfer
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom import common_utils
-from indexer.modules.custom.staking_fbtc import utils
-from indexer.modules.custom.staking_fbtc.domain.feature_staked_fbtc_detail import (
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera_udf.aci_features import common_utils
+from hemera_udf.staking_fbtc import utils
+from hemera_udf.staking_fbtc.domains import (
+ AfStakedTransferredBalanceCurrentDomain,
+ AfStakedTransferredBalanceHistDomain,
TransferredFBTCCurrentStatus,
TransferredFBTCDetail,
)
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.staking_fbtc.models.af_staked_transferred_balance_hist import AfStakedTransferredBalanceHist
logger = logging.getLogger(__name__)
diff --git a/indexer/modules/custom/total_supply/__init__.py b/hemera_udf/staking_fbtc/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/total_supply/__init__.py
rename to hemera_udf/staking_fbtc/models/__init__.py
diff --git a/indexer/modules/custom/staking_fbtc/models/af_staked_transferred_balance_current.py b/hemera_udf/staking_fbtc/models/af_staked_transferred_balance_current.py
similarity index 78%
rename from indexer/modules/custom/staking_fbtc/models/af_staked_transferred_balance_current.py
rename to hemera_udf/staking_fbtc/models/af_staked_transferred_balance_current.py
index 918a6de93..61a22589a 100644
--- a/indexer/modules/custom/staking_fbtc/models/af_staked_transferred_balance_current.py
+++ b/hemera_udf/staking_fbtc/models/af_staked_transferred_balance_current.py
@@ -1,10 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.staking_fbtc.domain.af_staked_transferred_balance import (
- AfStakedTransferredBalanceCurrentDomain,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.staking_fbtc.domains import AfStakedTransferredBalanceCurrentDomain
class AfStakedTransferredBalanceCurrent(HemeraModel):
@@ -22,9 +20,7 @@ class AfStakedTransferredBalanceCurrent(HemeraModel):
update_time = Column(TIMESTAMP, server_default=func.now())
reorg = Column(BOOLEAN, default=False)
- __table_args__ = (
- PrimaryKeyConstraint("contract_address", "wallet_address", "token_address", "block_timestamp", "block_number"),
- )
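+ # The "current" table now keeps a single row per (contract, wallet, token);
+ # per-block history stays in AfStakedTransferredBalanceHist.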
+ __table_args__ = (PrimaryKeyConstraint("contract_address", "wallet_address", "token_address"),)
@staticmethod
def model_domain_mapping():
diff --git a/indexer/modules/custom/staking_fbtc/models/af_staked_transferred_balance_hist.py b/hemera_udf/staking_fbtc/models/af_staked_transferred_balance_hist.py
similarity index 87%
rename from indexer/modules/custom/staking_fbtc/models/af_staked_transferred_balance_hist.py
rename to hemera_udf/staking_fbtc/models/af_staked_transferred_balance_hist.py
index 21f748e22..0feafd297 100644
--- a/indexer/modules/custom/staking_fbtc/models/af_staked_transferred_balance_hist.py
+++ b/hemera_udf/staking_fbtc/models/af_staked_transferred_balance_hist.py
@@ -1,10 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.staking_fbtc.domain.af_staked_transferred_balance import (
- AfStakedTransferredBalanceHistDomain,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.staking_fbtc.domains import AfStakedTransferredBalanceHistDomain
class AfStakedTransferredBalanceHist(HemeraModel):
diff --git a/indexer/modules/custom/staking_fbtc/models/feature_staked_fbtc_detail_records.py b/hemera_udf/staking_fbtc/models/feature_staked_fbtc_detail_records.py
similarity index 90%
rename from indexer/modules/custom/staking_fbtc/models/feature_staked_fbtc_detail_records.py
rename to hemera_udf/staking_fbtc/models/feature_staked_fbtc_detail_records.py
index 2f657c45a..5be15d2b4 100644
--- a/indexer/modules/custom/staking_fbtc/models/feature_staked_fbtc_detail_records.py
+++ b/hemera_udf/staking_fbtc/models/feature_staked_fbtc_detail_records.py
@@ -1,11 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.staking_fbtc.domain.feature_staked_fbtc_detail import (
- StakedFBTCDetail,
- TransferredFBTCDetail,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.staking_fbtc.domains import StakedFBTCDetail, TransferredFBTCDetail
class FeatureStakedFBTCDetailRecords(HemeraModel):
diff --git a/indexer/modules/custom/staking_fbtc/models/feature_staked_fbtc_detail_status.py b/hemera_udf/staking_fbtc/models/feature_staked_fbtc_detail_status.py
similarity index 88%
rename from indexer/modules/custom/staking_fbtc/models/feature_staked_fbtc_detail_status.py
rename to hemera_udf/staking_fbtc/models/feature_staked_fbtc_detail_status.py
index 743b2b7f9..ba69c4971 100644
--- a/indexer/modules/custom/staking_fbtc/models/feature_staked_fbtc_detail_status.py
+++ b/hemera_udf/staking_fbtc/models/feature_staked_fbtc_detail_status.py
@@ -1,11 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.staking_fbtc.domain.feature_staked_fbtc_detail import (
- StakedFBTCCurrentStatus,
- TransferredFBTCCurrentStatus,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.staking_fbtc.domains import StakedFBTCCurrentStatus, TransferredFBTCCurrentStatus
class FeatureStakedFBTCDetailStatus(HemeraModel):
diff --git a/indexer/modules/custom/staking_fbtc/utils.py b/hemera_udf/staking_fbtc/utils.py
similarity index 89%
rename from indexer/modules/custom/staking_fbtc/utils.py
rename to hemera_udf/staking_fbtc/utils.py
index d89e5aa5d..5ffb4c2ff 100644
--- a/indexer/modules/custom/staking_fbtc/utils.py
+++ b/hemera_udf/staking_fbtc/utils.py
@@ -1,11 +1,8 @@
from sqlalchemy import and_, func
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.staking_fbtc.domain.feature_staked_fbtc_detail import (
- StakedFBTCCurrentStatus,
- TransferredFBTCCurrentStatus,
-)
-from indexer.modules.custom.staking_fbtc.models.feature_staked_fbtc_detail_records import FeatureStakedFBTCDetailRecords
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.staking_fbtc.domains import StakedFBTCCurrentStatus, TransferredFBTCCurrentStatus
+from hemera_udf.staking_fbtc.models.feature_staked_fbtc_detail_records import FeatureStakedFBTCDetailRecords
def get_current_status_generic(db_service, contract_list, block_number, status_class):
diff --git a/hemera_udf/stats/__init__.py b/hemera_udf/stats/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/stats/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/total_supply/domain/__init__.py b/hemera_udf/stats/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/total_supply/domain/__init__.py
rename to hemera_udf/stats/models/__init__.py
diff --git a/indexer/modules/custom/stats/models/daily_addresses_stats.py b/hemera_udf/stats/models/daily_addresses_stats.py
similarity index 90%
rename from indexer/modules/custom/stats/models/daily_addresses_stats.py
rename to hemera_udf/stats/models/daily_addresses_stats.py
index 8766fc24b..b2839efc7 100644
--- a/indexer/modules/custom/stats/models/daily_addresses_stats.py
+++ b/hemera_udf/stats/models/daily_addresses_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, DATE
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class DailyAddressesStats(HemeraModel):
diff --git a/indexer/modules/custom/stats/models/daily_blocks_stats.py b/hemera_udf/stats/models/daily_blocks_stats.py
similarity index 92%
rename from indexer/modules/custom/stats/models/daily_blocks_stats.py
rename to hemera_udf/stats/models/daily_blocks_stats.py
index 65185a067..f4a540fd7 100644
--- a/indexer/modules/custom/stats/models/daily_blocks_stats.py
+++ b/hemera_udf/stats/models/daily_blocks_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import NUMERIC, Column
from sqlalchemy.dialects.postgresql import BIGINT, DATE
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class DailyBlocksStats(HemeraModel):
diff --git a/indexer/modules/custom/stats/models/daily_bridge_transactions_stats.py b/hemera_udf/stats/models/daily_bridge_transactions_stats.py
similarity index 87%
rename from indexer/modules/custom/stats/models/daily_bridge_transactions_stats.py
rename to hemera_udf/stats/models/daily_bridge_transactions_stats.py
index c442bef5c..78a3463f0 100644
--- a/indexer/modules/custom/stats/models/daily_bridge_transactions_stats.py
+++ b/hemera_udf/stats/models/daily_bridge_transactions_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, DATE
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class DailyBridgeTransactionsAggregates(HemeraModel):
diff --git a/indexer/modules/custom/stats/models/daily_tokens_stats.py b/hemera_udf/stats/models/daily_tokens_stats.py
similarity index 91%
rename from indexer/modules/custom/stats/models/daily_tokens_stats.py
rename to hemera_udf/stats/models/daily_tokens_stats.py
index c422c39dd..fc308e95d 100644
--- a/indexer/modules/custom/stats/models/daily_tokens_stats.py
+++ b/hemera_udf/stats/models/daily_tokens_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, DATE, INTEGER
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class DailyTokensStats(HemeraModel):
diff --git a/indexer/modules/custom/stats/models/daily_transactions_stats.py b/hemera_udf/stats/models/daily_transactions_stats.py
similarity index 94%
rename from indexer/modules/custom/stats/models/daily_transactions_stats.py
rename to hemera_udf/stats/models/daily_transactions_stats.py
index 54b698815..dfa488750 100644
--- a/indexer/modules/custom/stats/models/daily_transactions_stats.py
+++ b/hemera_udf/stats/models/daily_transactions_stats.py
@@ -1,7 +1,7 @@
from sqlalchemy import Column
from sqlalchemy.dialects.postgresql import BIGINT, DATE, NUMERIC
-from common.models import HemeraModel
+from hemera.common.models import HemeraModel
class DailyTransactionsStats(HemeraModel):
diff --git a/indexer/modules/custom/total_supply/models/__init__.py b/hemera_udf/thena/__init__.py
similarity index 100%
rename from indexer/modules/custom/total_supply/models/__init__.py
rename to hemera_udf/thena/__init__.py
diff --git a/hemera_udf/thena/abi.py b/hemera_udf/thena/abi.py
new file mode 100644
index 000000000..dd2215841
--- /dev/null
+++ b/hemera_udf/thena/abi.py
@@ -0,0 +1,82 @@
+from hemera.common.utils.abi_code_utils import Event, Function
+
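+# Event/Function wrap raw ABI fragments; the jobs below rely on helpers such as
+# Event.get_signature() for log filtering and pass Function objects straight to
+# multicall Calls.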
+MINT_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "owner", "type": "address"},
+ {"indexed": True, "internalType": "int24", "name": "bottomTick", "type": "int24"},
+ {"indexed": True, "internalType": "int24", "name": "topTick", "type": "int24"},
+ {"indexed": False, "internalType": "uint128", "name": "liquidityAmount", "type": "uint128"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "Mint",
+ "type": "event",
+ }
+)
+
+BURN_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "owner", "type": "address"},
+ {"indexed": True, "internalType": "int24", "name": "bottomTick", "type": "int24"},
+ {"indexed": True, "internalType": "int24", "name": "topTick", "type": "int24"},
+ {"indexed": False, "internalType": "uint128", "name": "liquidityAmount", "type": "uint128"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "Burn",
+ "type": "event",
+ }
+)
+LIQUIDITY_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "liquidity",
+ "outputs": [{"internalType": "uint128", "name": "", "type": "uint128"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+BALANCE_OF_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "account", "type": "address"}],
+ "name": "balanceOf",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+TOTAL_SUPPLY_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "totalSupply",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+BASE_LOWER_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "baseLower",
+ "outputs": [{"internalType": "int24", "name": "", "type": "int24"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+BASE_UPPER_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "baseUpper",
+ "outputs": [{"internalType": "int24", "name": "", "type": "int24"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
diff --git a/indexer/modules/custom/uniswap_v2/__init__.py b/hemera_udf/thena/domains/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v2/__init__.py
rename to hemera_udf/thena/domains/__init__.py
diff --git a/hemera_udf/thena/domains/feature_thena.py b/hemera_udf/thena/domains/feature_thena.py
new file mode 100644
index 000000000..73383585b
--- /dev/null
+++ b/hemera_udf/thena/domains/feature_thena.py
@@ -0,0 +1,27 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class ThenaLiquidityDomain(Domain):
+ pool_address: str
+ liquidity: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class ThenaSharesDomain(Domain):
+ farming_address: str
+ gamma_address: str
+ pool_address: str
+ wallet_address: str
+
+ shares: int
+ total_supply: int
+ tick_lower: int
+ tick_upper: int
+
+ block_number: int
+ block_timestamp: int
diff --git a/hemera_udf/thena/liquidity_job.py b/hemera_udf/thena/liquidity_job.py
new file mode 100644
index 000000000..815b43d1e
--- /dev/null
+++ b/hemera_udf/thena/liquidity_job.py
@@ -0,0 +1,67 @@
+import logging
+
+from hemera.indexer.domains.log import Log
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.thena.abi import BURN_EVENT, LIQUIDITY_FUNCTION, MINT_EVENT
+from hemera_udf.thena.domains.feature_thena import ThenaLiquidityDomain
+
+logger = logging.getLogger(__name__)
+
+
+class ThenaLiquidityJob(FilterTransactionDataJob):
+ dependency_types = [Log]
+ output_types = [ThenaLiquidityDomain]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ config = kwargs["config"]["thena_job"]
+ self.thena_liquidity_pool = config.get("thena_liquidity_pool_address")
+ self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
+
+ def get_filter(self):
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ topics=[MINT_EVENT.get_signature(), BURN_EVENT.get_signature()],
+ addresses=[self.thena_liquidity_pool],
+ ),
+ ]
+ )
+
+ def _process(self, **kwargs):
+ logs = self._data_buff[Log.type()]
+
+ call_dict = {}
+
+ # liquidity
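+ # Keying by block_number dedupes multiple Mint/Burn logs in the same block
+ # into a single liquidity() snapshot call.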
+ for log in logs:
+ if log.address == self.thena_liquidity_pool and log.topic0 in [
+ MINT_EVENT.get_signature(),
+ BURN_EVENT.get_signature(),
+ ]:
+ call_dict[log.block_number] = Call(
+ target=log.address,
+ function_abi=LIQUIDITY_FUNCTION,
+ block_number=log.block_number,
+ user_defined_k=log.block_timestamp,
+ )
+
+ call_list = list(call_dict.values())
+ self.multi_call_helper.execute_calls(call_list)
+
+ for call in call_list:
+ if call.returns:
+ pool_address = call.target.lower()
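+ # liquidity() declares an unnamed output, so the decoded value lands under the "" key.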
+ liquidity = call.returns.get("")
+ thena_liquidity_domain = ThenaLiquidityDomain(
+ pool_address=pool_address,
+ block_number=call.block_number,
+ block_timestamp=call.user_defined_k,
+ liquidity=liquidity,
+ )
+ self._collect_domain(thena_liquidity_domain)
diff --git a/indexer/modules/custom/uniswap_v2/domain/__init__.py b/hemera_udf/thena/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v2/domain/__init__.py
rename to hemera_udf/thena/models/__init__.py
diff --git a/hemera_udf/thena/models/af_thena_liquidity.py b/hemera_udf/thena/models/af_thena_liquidity.py
new file mode 100644
index 000000000..92ebe9b47
--- /dev/null
+++ b/hemera_udf/thena/models/af_thena_liquidity.py
@@ -0,0 +1,30 @@
+from sqlalchemy import INTEGER, Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.thena.domains.feature_thena import ThenaLiquidityDomain
+
+
+class AfThenaLiquidity(HemeraModel):
+ __tablename__ = "af_thena_liquidity"
+ pool_address = Column(BYTEA, primary_key=True)
+ liquidity = Column(NUMERIC)
+
+ block_number = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("pool_address", "block_number"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": ThenaLiquidityDomain,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/thena/models/af_thena_shares.py b/hemera_udf/thena/models/af_thena_shares.py
new file mode 100644
index 000000000..e78943fd5
--- /dev/null
+++ b/hemera_udf/thena/models/af_thena_shares.py
@@ -0,0 +1,40 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.thena.domains.feature_thena import ThenaSharesDomain
+
+
+class AfThenaShares(HemeraModel):
+ __tablename__ = "af_thena_shares"
+
+ pool_address = Column(BYTEA, primary_key=True)
+ farming_address = Column(BYTEA, primary_key=True)
+ gamma_address = Column(BYTEA, primary_key=True)
+ wallet_address = Column(BYTEA, primary_key=True)
+
+ shares = Column(NUMERIC)
+ total_supply = Column(NUMERIC)
+ tick_lower = Column(NUMERIC)
+ tick_upper = Column(NUMERIC)
+
+ block_number = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (
+ PrimaryKeyConstraint("farming_address", "pool_address", "gamma_address", "wallet_address", "block_number"),
+ )
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": ThenaSharesDomain,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/thena/shares_job.py b/hemera_udf/thena/shares_job.py
new file mode 100644
index 000000000..ed730d1cb
--- /dev/null
+++ b/hemera_udf/thena/shares_job.py
@@ -0,0 +1,105 @@
+import logging
+
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.thena.abi import BALANCE_OF_FUNCTION, BASE_LOWER_FUNCTION, BASE_UPPER_FUNCTION, TOTAL_SUPPLY_FUNCTION
+from hemera_udf.thena.domains.feature_thena import ThenaSharesDomain
+
+logger = logging.getLogger(__name__)
+
+
+class ThenaSharesJob(FilterTransactionDataJob):
+ dependency_types = [ERC20TokenTransfer]
+ output_types = [ThenaSharesDomain]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ config = kwargs["config"]["thena_job"]
+ self.gamma_pool_address = config["gamma_pool_address"]
+ self.thena_farming_pool_address = config["thena_farming_pool_address"]
+ self.thena_liquidity_pool = config.get("thena_liquidity_pool_address")
+
+ self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
+
+ def get_filter(self):
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ addresses=[self.gamma_pool_address],
+ ),
+ ]
+ )
+
+ def _process(self, **kwargs):
+ erc20_token_transfers = self._data_buff[ERC20TokenTransfer.type()]
+ wallet_address_dict = {}
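+ # Collect balanceOf targets for both counterparties of each gamma-pool transfer,
+ # dedup'd per (wallet, block); balanceOf is queried on the farming pool contract.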
+ for tt in erc20_token_transfers:
+ if tt.token_address == self.gamma_pool_address:
+ common_data = {
+ "target": self.thena_farming_pool_address,
+ "block_number": tt.block_number,
+ "user_defined_k": tt.block_timestamp,
+ }
+ wallet_address_dict[tt.from_address, tt.block_number] = {**common_data, "parameters": [tt.from_address]}
+ wallet_address_dict[tt.to_address, tt.block_number] = {**common_data, "parameters": [tt.to_address]}
+
+ # share
+ call_dict_list = list(wallet_address_dict.values())
+ shares_list = []
+
+ for call_dict in call_dict_list:
+ call = Call(**call_dict, function_abi=BALANCE_OF_FUNCTION)
+ shares_list.append(call)
+
+ self.multi_call_helper.execute_calls(shares_list)
+
+ # total supply
+ supply_list = []
+ tick_lower_list = []
+ tick_upper_list = []
+
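+ # totalSupply/baseLower/baseUpper take no arguments, so balanceOf's parameters
+ # are dropped; the tick-bound getters live on the gamma pool contract instead.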
+ for call_dict in call_dict_list:
+ call_dict.pop("parameters")
+ call = Call(**call_dict, function_abi=TOTAL_SUPPLY_FUNCTION)
+ supply_list.append(call)
+
+ call_dict_copy = call_dict.copy()
+ call_dict_copy["target"] = self.gamma_pool_address
+
+ call = Call(**call_dict_copy, function_abi=BASE_LOWER_FUNCTION)
+ tick_lower_list.append(call)
+
+ call = Call(**call_dict_copy, function_abi=BASE_UPPER_FUNCTION)
+ tick_upper_list.append(call)
+
+ self.multi_call_helper.execute_calls(supply_list)
+ self.multi_call_helper.execute_calls(tick_lower_list)
+ self.multi_call_helper.execute_calls(tick_upper_list)
+
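+ # All four lists were built from call_dict_list in the same order, so zip keeps
+ # each wallet's calls aligned.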
+ for shares_call, supply_call, tick_lower_call, tick_upper_call in zip(
+ shares_list, supply_list, tick_lower_list, tick_upper_list
+ ):
+ if shares_call.returns:
+ shares = shares_call.returns.get("")
+ total_supply = supply_call.returns.get("")
+ tick_lower = tick_lower_call.returns.get("")
+ tick_upper = tick_upper_call.returns.get("")
+
+ shares_domain = ThenaSharesDomain(
+ farming_address=self.thena_farming_pool_address,
+ gamma_address=self.gamma_pool_address,
+ pool_address=self.thena_liquidity_pool,
+ wallet_address=shares_call.parameters[0],
+ total_supply=total_supply,
+ shares=shares,
+ tick_lower=tick_lower,
+ tick_upper=tick_upper,
+ block_number=shares_call.block_number,
+ block_timestamp=shares_call.user_defined_k,
+ )
+ self._collect_domain(shares_domain)
diff --git a/indexer/modules/custom/uniswap_v2/models/__init__.py b/hemera_udf/token_holder_metrics/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v2/models/__init__.py
rename to hemera_udf/token_holder_metrics/__init__.py
diff --git a/hemera_udf/token_holder_metrics/domains/metrics.py b/hemera_udf/token_holder_metrics/domains/metrics.py
new file mode 100644
index 000000000..e303366ed
--- /dev/null
+++ b/hemera_udf/token_holder_metrics/domains/metrics.py
@@ -0,0 +1,76 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class ERC20TokenTransferWithPriceD(Domain):
+ transaction_hash: str
+ log_index: int
+ from_address: str
+ to_address: str
+ value: int
+ price: float
+ decimals: int
+ is_swap: bool
+ from_address_balance: int # balance after transfer
+ to_address_balance: int # balance after transfer
+ token_type: str
+ token_address: str
+ block_number: int
+ block_hash: str
+ block_timestamp: int
+
+
+@dataclass
+class TokenHolderMetricsD(Domain):
+ holder_address: str
+ token_address: str
+ block_number: int
+ block_timestamp: int
+
+ first_block_timestamp: int
+ last_swap_timestamp: int
+ last_transfer_timestamp: int
+ last_price: float = 0.0
+ current_balance: int = 0
+ max_balance: int = 0
+ max_balance_timestamp: int = 0
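+ # First time the balance dropped to <=75% / <=50% of its running max; reset to 0
+ # whenever a new max is reached (see ExportTokenHolderMetricsJob).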
+ sell_25_timestamp: int = 0
+ sell_50_timestamp: int = 0
+
+ total_buy_count: int = 0
+ total_buy_amount: int = 0
+ total_buy_usd: float = 0.0
+
+ total_sell_count: int = 0
+ total_sell_amount: int = 0
+ total_sell_usd: float = 0.0
+
+ swap_buy_count: int = 0
+ swap_buy_amount: int = 0
+ swap_buy_usd: float = 0.0
+
+ swap_sell_count: int = 0
+ swap_sell_amount: int = 0
+ swap_sell_usd: float = 0.0
+
+ success_sell_count: int = 0
+ fail_sell_count: int = 0
+
+ current_average_buy_price: float = 0.0
+
+ realized_pnl: float = 0.0
+ sell_pnl: float = 0.0
+ win_rate: float = 0.0
+ pnl_valid: bool = False
+
+
+@dataclass
+class TokenHolderMetricsHistoryD(TokenHolderMetricsD):
+ pass
+
+
+@dataclass
+class TokenHolderMetricsCurrentD(TokenHolderMetricsD):
+ pass
diff --git a/hemera_udf/token_holder_metrics/export_token_holder_metrics.py b/hemera_udf/token_holder_metrics/export_token_holder_metrics.py
new file mode 100644
index 000000000..175ce9caf
--- /dev/null
+++ b/hemera_udf/token_holder_metrics/export_token_holder_metrics.py
@@ -0,0 +1,384 @@
+import logging
+import time
+from dataclasses import asdict
+
+from sqlalchemy import text
+
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera_udf.token_holder_metrics.domains.metrics import (
+ ERC20TokenTransferWithPriceD,
+ TokenHolderMetricsCurrentD,
+ TokenHolderMetricsHistoryD,
+)
+from hemera_udf.token_holder_metrics.models.metrics import TokenHolderMetricsCurrent
+
+logger = logging.getLogger(__name__)
+
+MAX_SAFE_VALUE = 2**255
+MIN_BALANCE_THRESHOLD = 1e-4
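+# Transfers above MAX_SAFE_VALUE are skipped as implausible; MIN_BALANCE_THRESHOLD
+# treats tiny residual balances as zero.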
+
+
+class ExportTokenHolderMetricsJob(ExtensionJob):
+ dependency_types = [ERC20TokenTransferWithPriceD]
+ output_types = [TokenHolderMetricsCurrentD, TokenHolderMetricsHistoryD]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ self._non_meme_tokens = self._load_non_meme_tokens()
+ self.history_token_prices = None
+
+ def _collect(self, **kwargs):
+ pass
+
+ def _load_non_meme_tokens(self):
+ session = self._service.get_service_session()
+ non_meme_tokens = set(
+ bytes_to_hex_str(row[0]) for row in session.execute(text("SELECT address FROM non_meme_tokens")).fetchall()
+ )
+ session.close()
+ return non_meme_tokens
+
+ def _process(self, **kwargs):
+ start_time = time.time()
+
+ transfers = self._data_buff[ERC20TokenTransferWithPriceD.type()]
+
+ t2 = time.time()
+ transfers = sorted(
+ [t for t in transfers if t.token_address not in self._non_meme_tokens],
+ key=lambda x: (x.block_number, x.log_index),
+ )
+ logger.info(f"Filtered non-meme tokens in {time.time() - t2:.2f}s")
+
+ self._block_address_token_values = {}
+ self._block_address_token_balances = {}
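+ # Net per-block value deltas are tracked alongside reported post-transfer balances;
+ # when the two agree (or the balance is dust), the wallet's PnL is marked valid in
+ # _update_holder_metrics.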
+ for transfer in transfers:
+ block_number = transfer.block_number
+ from_key = (block_number, transfer.from_address, transfer.token_address)
+ to_key = (block_number, transfer.to_address, transfer.token_address)
+
+ if from_key not in self._block_address_token_values:
+ self._block_address_token_values[from_key] = 0
+ self._block_address_token_values[from_key] -= transfer.value
+
+ if to_key not in self._block_address_token_values:
+ self._block_address_token_values[to_key] = 0
+ self._block_address_token_values[to_key] += transfer.value
+
+ self._block_address_token_balances[from_key] = transfer.from_address_balance
+ self._block_address_token_balances[to_key] = transfer.to_address_balance
+
+ t3 = time.time()
+ address_token_pairs = set()
+ for transfer in transfers:
+ if transfer.value > MAX_SAFE_VALUE:
+ logger.warning(
+ f"Skipping transfer with unusually large value: {getattr(transfer, 'value', 'N/A')}, "
+ f"tx: {getattr(transfer, 'transaction_hash', 'N/A')}, "
+ f"token: {getattr(transfer, 'token_address', 'N/A')}"
+ )
+ continue
+ token = self.tokens.get(transfer.token_address)
+ if not token:
+ logger.warning(f"Token {transfer.token_address} not found")
+ continue
+
+ # Add both from and to addresses to the set
+ address_token_pairs.add((transfer.from_address, transfer.token_address))
+ address_token_pairs.add((transfer.to_address, transfer.token_address))
+
+ logger.info(f"Created {len(address_token_pairs)} address-token pairs in {time.time() - t3:.2f}s")
+
+ logger.info("Querying existing metrics...")
+ t5 = time.time()
+ current_metrics = self._get_address_token_holder_metrics_batch(list(address_token_pairs))
+ logger.info(f"Query completed in {time.time() - t5:.2f}s")
+
+ t6 = time.time()
+ for transfer in transfers:
+ if transfer.value > MAX_SAFE_VALUE or transfer.token_address not in self.tokens:
+ continue
+
+ token = self.tokens[transfer.token_address]
+ amount_usd = transfer.value * transfer.price / 10 ** (token["decimals"] or 0)
+
+ # Process "from" address
+ self._update_holder_metrics(
+ current_metrics,
+ transfer.from_address,
+ transfer.token_address,
+ transfer,
+ "out",
+ amount_usd,
+ transfer.price,
+ token,
+ )
+
+ # Process "to" address
+ self._update_holder_metrics(
+ current_metrics,
+ transfer.to_address,
+ transfer.token_address,
+ transfer,
+ "in",
+ amount_usd,
+ transfer.price,
+ token,
+ )
+
+ for metrics in current_metrics.values():
+ metrics.current_balance = self._block_address_token_balances.get(
+ (metrics.block_number, metrics.holder_address, metrics.token_address), 0
+ )
+
+ logger.info(f"Metrics update completed in {time.time() - t6:.2f}s")
+
+ self._collect_items(TokenHolderMetricsCurrentD.type(), list(current_metrics.values()))
+ history_metrics = [TokenHolderMetricsHistoryD(**asdict(metrics)) for metrics in current_metrics.values()]
+ self._collect_items(TokenHolderMetricsHistoryD.type(), history_metrics)
+
+ total_time = time.time() - start_time
+ logger.info(f"Total processing time: {total_time:.2f}s")
+
+ def _get_address_token_holder_metrics_batch(
+ self, address_token_pairs: list[tuple[str, str]]
+ ) -> dict[tuple[str, str], TokenHolderMetricsCurrentD]:
+ if not address_token_pairs:
+ return {}
+
+ start_time = time.time()
+ logger.info(f"Starting to process {len(address_token_pairs)} address-token pairs")
+
+ BATCH_SIZE = 1000
+ result = {}
+ session = self._service.get_service_session()
+
+ partition_groups = {}
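+ # The first hex nibble of holder_address selects one of the
+ # af_token_holder_metrics_current_p0..p15 partition tables.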
+ for addr, token in address_token_pairs:
+ partition_idx = int(addr[2:3], 16)
+ partition_groups.setdefault(partition_idx, []).append((addr, token))
+
+ for partition_idx, partition_pairs in partition_groups.items():
+ logger.info(f"Processing partition {partition_idx} with {len(partition_pairs)} pairs")
+
+ for i in range(0, len(partition_pairs), BATCH_SIZE):
+ batch_pairs = partition_pairs[i : i + BATCH_SIZE]
+
+ address_bytes_pairs = [(hex_str_to_bytes(addr), hex_str_to_bytes(token)) for addr, token in batch_pairs]
+
+ query = text(
+ f"""
+ SELECT *
+ FROM af_token_holder_metrics_current_p{partition_idx}
+ WHERE (holder_address, token_address) IN :pairs
+ """
+ )
+
+ batch_results = (
+ session.query(TokenHolderMetricsCurrent)
+ .from_statement(query.params(pairs=tuple(address_bytes_pairs)))
+ .all()
+ )
+
+ pair_lookup = {
+ (bytes_to_hex_str(m.holder_address), bytes_to_hex_str(m.token_address)): m for m in batch_results
+ }
+
+ for addr, token in batch_pairs:
+ metrics = pair_lookup.get((addr, token))
+ if metrics:
+ result[(addr, token)] = TokenHolderMetricsCurrentD(
+ holder_address=addr,
+ token_address=token,
+ block_number=metrics.block_number,
+ block_timestamp=int(metrics.block_timestamp.timestamp()) if metrics.block_timestamp else 0,
+ first_block_timestamp=(
+ int(metrics.first_block_timestamp.timestamp()) if metrics.first_block_timestamp else 0
+ ),
+ last_swap_timestamp=(
+ int(metrics.last_swap_timestamp.timestamp()) if metrics.last_swap_timestamp else 0
+ ),
+ last_transfer_timestamp=(
+ int(metrics.last_transfer_timestamp.timestamp())
+ if metrics.last_transfer_timestamp
+ else 0
+ ),
+ current_balance=float(metrics.current_balance or 0),
+ max_balance=float(metrics.max_balance or 0),
+ max_balance_timestamp=(
+ int(metrics.max_balance_timestamp.timestamp()) if metrics.max_balance_timestamp else 0
+ ),
+ sell_25_timestamp=(
+ int(metrics.sell_25_timestamp.timestamp()) if metrics.sell_25_timestamp else 0
+ ),
+ sell_50_timestamp=(
+ int(metrics.sell_50_timestamp.timestamp()) if metrics.sell_50_timestamp else 0
+ ),
+ total_buy_count=metrics.total_buy_count or 0,
+ total_buy_amount=float(metrics.total_buy_amount or 0),
+ total_buy_usd=float(metrics.total_buy_usd or 0),
+ total_sell_count=metrics.total_sell_count or 0,
+ total_sell_amount=float(metrics.total_sell_amount or 0),
+ total_sell_usd=float(metrics.total_sell_usd or 0),
+ swap_buy_count=metrics.swap_buy_count or 0,
+ swap_buy_amount=float(metrics.swap_buy_amount or 0),
+ swap_buy_usd=float(metrics.swap_buy_usd or 0),
+ swap_sell_count=metrics.swap_sell_count or 0,
+ swap_sell_amount=float(metrics.swap_sell_amount or 0),
+ swap_sell_usd=float(metrics.swap_sell_usd or 0),
+ success_sell_count=metrics.success_sell_count or 0,
+ fail_sell_count=metrics.fail_sell_count or 0,
+ current_average_buy_price=float(metrics.current_average_buy_price or 0),
+ realized_pnl=float(metrics.realized_pnl or 0),
+ sell_pnl=float(metrics.sell_pnl or 0),
+ win_rate=float(metrics.win_rate or 0),
+ pnl_valid=bool(metrics.pnl_valid or False),
+ )
+
+ session.close()
+ return result
+
+ def _update_holder_metrics(
+ self,
+ current_metrics: dict,
+ holder_address: str,
+ token_address: str,
+ transfer,
+ transfer_action: str,
+ amount_usd: float,
+ token_price: float,
+ token: dict,
+ ):
+ key = (holder_address, token_address)
+
+ if not current_metrics.get(key):
+ current_metrics[key] = TokenHolderMetricsCurrentD(
+ holder_address=holder_address,
+ token_address=token_address,
+ block_number=transfer.block_number,
+ block_timestamp=transfer.block_timestamp,
+ first_block_timestamp=transfer.block_timestamp,
+ last_swap_timestamp=transfer.block_timestamp,
+ last_transfer_timestamp=transfer.block_timestamp,
+ pnl_valid=False,
+ )
+
+ metrics = current_metrics[key]
+ if metrics.block_number > transfer.block_number:
+ return
+
+ metrics.block_number = transfer.block_number
+ metrics.block_timestamp = transfer.block_timestamp
+
+ set_pnl_valid_block_number = 0
+
+ new_current_balance = (
+ self._block_address_token_balances.get((transfer.block_number, holder_address, token_address), 0) or 0
+ )
+
+ if not metrics.pnl_valid:
+ block_key = (transfer.block_number, holder_address, token_address)
+ total_value = self._block_address_token_values.get(block_key, 0)
+
+ if (
+ abs(total_value - new_current_balance) < MIN_BALANCE_THRESHOLD
+ or new_current_balance < MIN_BALANCE_THRESHOLD
+ ):
+ metrics.pnl_valid = True
+ set_pnl_valid_block_number = transfer.block_number
+
+ # buy
+ # update balance
+ # update total buy count, amount, usd
+ # update current average buy price
+ # sell
+ # set average buy price to 0 when balance is less than MIN_BALANCE_THRESHOLD
+ # calculate pnl
+ # update balance
+ # update total sell count, amount, usd
+ # update realized pnl
+ # update success sell count
+ # update fail sell count
+ # update win rate
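+ # Worked example: holding 100 tokens bought at an average of $1, then buying
+ # 100 more at $2 gives (200 + 100 * 1) / 200 = $1.50 average (raw balances are
+ # scaled by 10**decimals in the code below).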
+ if transfer_action == "in":
+ new_balance = metrics.current_balance + transfer.value
+ if new_balance / 10 ** token["decimals"] > MIN_BALANCE_THRESHOLD:
+ new_average_buy_price = (
+ amount_usd + metrics.current_balance * metrics.current_average_buy_price / 10 ** token["decimals"]
+ ) / ((transfer.value + metrics.current_balance) / 10 ** token["decimals"])
+ else:
+ new_average_buy_price = 0
+
+ metrics.current_balance = new_balance
+ metrics.total_buy_count += 1
+ metrics.total_buy_amount += transfer.value
+ metrics.total_buy_usd += amount_usd
+ metrics.current_average_buy_price = new_average_buy_price
+ else:
+ sell_amount = transfer.value
+
+ if metrics.current_balance > 0:
+ sell_pnl = (token_price - metrics.current_average_buy_price) * sell_amount / 10 ** token["decimals"]
+ metrics.sell_pnl += sell_pnl
+
+ metrics.realized_pnl = (
+ metrics.total_sell_usd
+ - metrics.total_buy_usd
+ + metrics.current_balance * token_price / 10 ** token["decimals"]
+ )
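+ # i.e. total sell proceeds minus total buy cost, plus current holdings
+ # valued at today's price.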
+
+ if token_price > metrics.current_average_buy_price:
+ metrics.success_sell_count += 1
+ else:
+ metrics.fail_sell_count += 1
+
+ total_sells = metrics.success_sell_count + metrics.fail_sell_count
+ if total_sells > 0:
+ metrics.win_rate = metrics.success_sell_count / total_sells
+
+ metrics.current_balance -= sell_amount
+ if metrics.current_balance / 10 ** token["decimals"] < MIN_BALANCE_THRESHOLD:
+ metrics.current_average_buy_price = 0
+
+ metrics.total_sell_count += 1
+ metrics.total_sell_amount += sell_amount
+ metrics.total_sell_usd += amount_usd
+
+ if metrics.current_balance >= metrics.max_balance:
+ metrics.max_balance = metrics.current_balance
+ metrics.max_balance_timestamp = metrics.block_timestamp
+ metrics.sell_25_timestamp = 0
+ metrics.sell_50_timestamp = 0
+
+ if metrics.current_balance <= metrics.max_balance * 0.75 and metrics.sell_25_timestamp == 0:
+ metrics.sell_25_timestamp = metrics.block_timestamp
+ if metrics.current_balance <= metrics.max_balance * 0.5 and metrics.sell_50_timestamp == 0:
+ metrics.sell_50_timestamp = metrics.block_timestamp
+
+ metrics.last_transfer_timestamp = metrics.block_timestamp
+ metrics.last_price = token_price
+ if transfer.is_swap:
+ metrics.last_swap_timestamp = metrics.block_timestamp
+
+ if transfer_action == "in":
+ metrics.swap_buy_count += 1
+ metrics.swap_buy_amount += transfer.value
+ metrics.swap_buy_usd += amount_usd
+ else:
+ metrics.swap_sell_count += 1
+ metrics.swap_sell_amount += transfer.value
+ metrics.swap_sell_usd += amount_usd
+
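+ # Until the wallet's full history has been seen, accumulated PnL stats are
+ # unreliable, so keep them zeroed; on the block where pnl_valid first flips on,
+ # seed the average buy price from the current price.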
+ if not metrics.pnl_valid or metrics.block_number == set_pnl_valid_block_number:
+ metrics.sell_pnl = 0
+ metrics.realized_pnl = 0
+ metrics.success_sell_count = 0
+ metrics.fail_sell_count = 0
+ metrics.win_rate = 0
+ metrics.current_average_buy_price = 0
+ if metrics.block_number == set_pnl_valid_block_number and new_current_balance > MIN_BALANCE_THRESHOLD:
+ metrics.current_average_buy_price = token_price
diff --git a/hemera_udf/token_holder_metrics/export_token_transfer_with_price.py b/hemera_udf/token_holder_metrics/export_token_transfer_with_price.py
new file mode 100644
index 000000000..450b44808
--- /dev/null
+++ b/hemera_udf/token_holder_metrics/export_token_transfer_with_price.py
@@ -0,0 +1,157 @@
+import logging
+from dataclasses import asdict
+
+from sortedcontainers import SortedDict
+from sqlalchemy import text
+
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera_udf.token_holder_metrics.domains.metrics import ERC20TokenTransferWithPriceD
+from hemera_udf.token_price.domains import DexBlockTokenPrice
+from hemera_udf.uniswap_v2.domains import UniswapV2SwapEvent
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import UniswapV3SwapEvent
+
+logger = logging.getLogger(__name__)
+
+
+class ExportTokenTransferWithPriceJob(ExtensionJob):
+ dependency_types = [ERC20TokenTransfer, DexBlockTokenPrice, UniswapV2SwapEvent, UniswapV3SwapEvent, TokenBalance]
+ output_types = [ERC20TokenTransferWithPriceD]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ self.token_price_maps = None
+
+ def _collect(self, **kwargs):
+ pass
+
+ def _process(self, **kwargs):
+ self._init_history_token_prices(kwargs["start_block"])
+ self._init_token_dex_prices_batch(kwargs["start_block"], kwargs["end_block"])
+ transfers = self._data_buff[ERC20TokenTransfer.type()]
+ token_balance = {}
+ for balance in self._data_buff[TokenBalance.type()]:
+ token_balance[f"{balance.token_address}_{balance.address}_{balance.block_number}"] = balance.balance
+
+ swaps = self._data_buff[UniswapV2SwapEvent.type()] + self._data_buff[UniswapV3SwapEvent.type()]
+ swap_txs = {swap.transaction_hash: swap for swap in swaps}
+
+ to_export = []
+ for transfer in transfers:
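+ # Heuristic swap detection: a transfer counts as part of a swap when its
+ # originator matches the swap's sender or its recipient field
+ # (to_address on v2 events, recipient on v3 events).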
+ swap = swap_txs.get(transfer.transaction_hash)
+ is_swap = False
+ if swap:
+ if swap.sender == transfer.from_address:
+ is_swap = True
+ elif (hasattr(swap, "to_address") and swap.to_address == transfer.from_address) or (
+ hasattr(swap, "recipient") and swap.recipient == transfer.from_address
+ ):
+ is_swap = True
+
+ decimals = 0
+ token = self.tokens.get(transfer.token_address)
+ if token:
+ decimals = token["decimals"]
+
+ price = self._get_token_dex_price(transfer.token_address, transfer.block_number)
+ from_address_balance = token_balance.get(
+ f"{transfer.token_address}_{transfer.from_address}_{transfer.block_number}", 0
+ )
+ to_address_balance = token_balance.get(
+ f"{transfer.token_address}_{transfer.to_address}_{transfer.block_number}", 0
+ )
+ to_export.append(
+ ERC20TokenTransferWithPriceD(
+ **asdict(transfer),
+ price=price,
+ is_swap=is_swap,
+ from_address_balance=from_address_balance,
+ to_address_balance=to_address_balance,
+ decimals=decimals,
+ )
+ )
+ self._collect_items(ERC20TokenTransferWithPriceD.type(), to_export)
+ self._update_history_token_prices()
+
+ def _init_history_token_prices(self, start_block: int):
+ if self.token_price_maps is not None:
+ return
+ session = self._service.get_service_session()
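+ # Seed the price cache with the most recent price per token before the
+ # batch start, so transfers early in the range can still resolve a price.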
+ token_blocks = session.execute(
+ text(
+ """
+ SELECT token_address, block_number, token_price
+ FROM (
+ SELECT token_address, block_number, token_price,
+ ROW_NUMBER() OVER (PARTITION BY token_address ORDER BY block_number DESC) as rn
+ FROM af_dex_block_token_price
+ WHERE block_number < :start_block
+ ) t
+ WHERE rn = 1
+ """
+ ),
+ {"start_block": start_block},
+ ).fetchall()
+ session.close()
+ self.token_price_maps = {}
+ for row in token_blocks:
+ token_addr = bytes_to_hex_str(row[0])
+ block_number = row[1]
+ self.token_price_maps[token_addr] = SortedDict()
+ self.token_price_maps[token_addr][block_number] = float(row[2])
+
+ def _init_token_dex_prices_batch(self, start_block: int, end_block: int):
+
+ price_sql = text(
+ """
+ SELECT token_address, block_number, token_price
+ FROM af_dex_block_token_price
+ WHERE block_number BETWEEN :min_block AND :max_block
+ ORDER BY block_number
+ """
+ )
+
+ session = self._service.get_service_session()
+ prices = session.execute(price_sql, {"min_block": start_block, "max_block": end_block}).fetchall()
+ session.close()
+
+ for price_row in prices:
+ token_addr = bytes_to_hex_str(price_row[0])
+ if token_addr not in self.token_price_maps:
+ self.token_price_maps[token_addr] = SortedDict()
+ block_num = price_row[1]
+ price = float(price_row[2])
+ self.token_price_maps[token_addr][block_num] = price
+
+ def _get_token_dex_price(self, token_addr: str, block_num: int):
+ price_map = self.token_price_maps.get(token_addr)
+ if not price_map:
+ return 0.0
+
+ keys = list(price_map.keys())
+ idx = price_map.bisect_left(block_num)
+
+ # Exact match first: bisect_left returns 0 both for a block that
+ # precedes all recorded prices and for an exact hit on the smallest
+ # key, so the exact-match check must come before the idx == 0 case.
+ if idx < len(keys) and keys[idx] == block_num:
+ return price_map[block_num]
+ if idx == 0:
+ return 0.0
+ return price_map[keys[idx - 1]]
+
+ def _update_history_token_prices(self):
+ for token_addr, price_map in self.token_price_maps.items():
+ if not price_map:
+ continue
+ latest_block = price_map.keys()[-1]
+ latest_price = price_map[latest_block]
+
+ # Clear all data and keep only the latest price
+ price_map.clear()
+ price_map[latest_block] = latest_price
diff --git a/hemera_udf/token_holder_metrics/models/metrics.py b/hemera_udf/token_holder_metrics/models/metrics.py
new file mode 100644
index 000000000..ad66ee77d
--- /dev/null
+++ b/hemera_udf/token_holder_metrics/models/metrics.py
@@ -0,0 +1,172 @@
+from sqlalchemy import BOOLEAN, INTEGER, Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.token_holder_metrics.domains.metrics import (
+ ERC20TokenTransferWithPriceD,
+ TokenHolderMetricsCurrentD,
+ TokenHolderMetricsHistoryD,
+)
+
+
+class TokenHolderMetricsCurrent(HemeraModel):
+ __tablename__ = "af_token_holder_metrics_current"
+
+ holder_address = Column(BYTEA, primary_key=True)
+ token_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ current_balance = Column(NUMERIC)
+ max_balance = Column(NUMERIC)
+ max_balance_timestamp = Column(TIMESTAMP)
+ sell_25_timestamp = Column(TIMESTAMP)
+ sell_50_timestamp = Column(TIMESTAMP)
+
+ total_buy_count = Column(BIGINT)
+ total_buy_amount = Column(NUMERIC)
+ total_buy_usd = Column(NUMERIC)
+
+ total_sell_count = Column(BIGINT)
+ total_sell_amount = Column(NUMERIC)
+ total_sell_usd = Column(NUMERIC)
+
+ swap_buy_count = Column(BIGINT)
+ swap_buy_amount = Column(NUMERIC)
+ swap_buy_usd = Column(NUMERIC)
+
+ swap_sell_count = Column(BIGINT)
+ swap_sell_amount = Column(NUMERIC)
+ swap_sell_usd = Column(NUMERIC)
+
+ last_transfer_timestamp = Column(TIMESTAMP)
+ last_swap_timestamp = Column(TIMESTAMP)
+ last_price = Column(NUMERIC)
+ success_sell_count = Column(BIGINT)
+ fail_sell_count = Column(BIGINT)
+
+ current_average_buy_price = Column(NUMERIC)
+
+ realized_pnl = Column(NUMERIC)
+ sell_pnl = Column(NUMERIC)
+ win_rate = Column(NUMERIC)
+ pnl_valid = Column(BOOLEAN)
+
+ first_block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("holder_address", "token_address"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": TokenHolderMetricsCurrentD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
+
+
+class TokenHolderMetricsHistory(HemeraModel):
+ __tablename__ = "af_token_holder_metrics_history"
+
+ holder_address = Column(BYTEA, primary_key=True)
+ token_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
+
+ current_balance = Column(NUMERIC)
+ max_balance = Column(NUMERIC)
+ max_balance_timestamp = Column(TIMESTAMP)
+ sell_25_timestamp = Column(TIMESTAMP)
+ sell_50_timestamp = Column(TIMESTAMP)
+
+ total_buy_count = Column(BIGINT)
+ total_buy_amount = Column(NUMERIC)
+ total_buy_usd = Column(NUMERIC)
+
+ total_sell_count = Column(BIGINT)
+ total_sell_amount = Column(NUMERIC)
+ total_sell_usd = Column(NUMERIC)
+
+ swap_buy_count = Column(BIGINT)
+ swap_buy_amount = Column(NUMERIC)
+ swap_buy_usd = Column(NUMERIC)
+
+ swap_sell_count = Column(BIGINT)
+ swap_sell_amount = Column(NUMERIC)
+ swap_sell_usd = Column(NUMERIC)
+
+ last_transfer_timestamp = Column(TIMESTAMP)
+ last_swap_timestamp = Column(TIMESTAMP)
+ last_price = Column(NUMERIC)
+
+ success_sell_count = Column(BIGINT)
+ fail_sell_count = Column(BIGINT)
+
+ current_average_buy_price = Column(NUMERIC)
+
+ realized_pnl = Column(NUMERIC)
+ sell_pnl = Column(NUMERIC)
+ win_rate = Column(NUMERIC)
+ pnl_valid = Column(BOOLEAN)
+
+ first_block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("holder_address", "token_address", "block_timestamp"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": TokenHolderMetricsHistoryD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
+
+
+class ERC20TokenTransfersWithPrice(HemeraModel):
+ __tablename__ = "af_erc20_token_transfers_with_price"
+
+ transaction_hash = Column(BYTEA, primary_key=True)
+ log_index = Column(INTEGER, primary_key=True)
+ from_address = Column(BYTEA)
+ to_address = Column(BYTEA)
+ token_address = Column(BYTEA)
+ value = Column(NUMERIC(100))
+ price = Column(NUMERIC)
+ decimals = Column(NUMERIC(100))
+ is_swap = Column(BOOLEAN)
+ from_address_balance = Column(NUMERIC(100))
+ to_address_balance = Column(NUMERIC(100))
+
+ block_number = Column(BIGINT)
+ block_hash = Column(BYTEA, primary_key=True)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index", "block_timestamp"),)
+ __query_order__ = [block_number, log_index]
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": ERC20TokenTransferWithPriceD,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/indexer/modules/custom/uniswap_v3/__init__.py b/hemera_udf/token_price/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v3/__init__.py
rename to hemera_udf/token_price/__init__.py
diff --git a/hemera_udf/token_price/domains.py b/hemera_udf/token_price/domains.py
new file mode 100644
index 000000000..6184bdf5a
--- /dev/null
+++ b/hemera_udf/token_price/domains.py
@@ -0,0 +1,36 @@
+from dataclasses import dataclass
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class BlockTokenPrice(Domain):
+ token_symbol: str
+ token_price: float
+ block_number: int
+
+
+@dataclass
+class DexBlockTokenPrice(Domain):
+ token_address: str
+ token_symbol: str
+ decimals: int
+ amount: float
+ amount_usd: float
+ token_price: float
+
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class DexBlockTokenPriceCurrent(Domain):
+ token_address: str
+ token_symbol: str
+ decimals: int
+ amount: float
+ amount_usd: float
+ token_price: float
+
+ block_number: int
+ block_timestamp: int
diff --git a/hemera_udf/token_price/jobs/__init__.py b/hemera_udf/token_price/jobs/__init__.py
new file mode 100644
index 000000000..a4708498b
--- /dev/null
+++ b/hemera_udf/token_price/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 17:03
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/hemera_udf/token_price/jobs/export_block_token_price_job.py b/hemera_udf/token_price/jobs/export_block_token_price_job.py
new file mode 100644
index 000000000..c1d6fc0f6
--- /dev/null
+++ b/hemera_udf/token_price/jobs/export_block_token_price_job.py
@@ -0,0 +1,113 @@
+import logging
+from datetime import datetime
+
+from sqlalchemy import text
+
+from hemera.indexer.domains.block import Block
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera_udf.token_price.domains import BlockTokenPrice
+
+logger = logging.getLogger(__name__)
+
+
+class ExportBlockTokenPriceJob(ExtensionJob):
+ dependency_types = [Block]
+
+ output_types = [BlockTokenPrice]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ config = kwargs["config"].get("export_block_token_price_job", {})
+ self.symbols = set(config.values())
+
+ @staticmethod
+ def ts_to_date(ts):
+ return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
+
+ @staticmethod
+ def date_to_ts(date):
+ return int(date.timestamp())
+
+ def _process(self, **kwargs):
+ if not self.symbols:
+ return
+
+ blocks = self._data_buff[Block.type()]
+ if not blocks:
+ return
+ blocks.sort(key=lambda block: block.number)
+
+ start_block_timestamp = self.ts_to_date(blocks[0].timestamp)
+ end_block_timestamp = self.ts_to_date(blocks[-1].timestamp)
+
+ token_price_results = self.get_token_price(start_block_timestamp, end_block_timestamp)
+
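+ # For every block, pick each symbol's price row whose timestamp is
+ # closest to the block timestamp (linear scan over the fetched rows).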
+ for block in blocks:
+ price_map = {}
+
+ for symbol in self.symbols:
+ closest_price = None
+ closest_time_diff = float("inf")
+
+ for price in token_price_results:
+ if price.symbol == symbol:
+ time_diff = abs(price.timestamp.timestamp() - block.timestamp)
+ if time_diff < closest_time_diff:
+ closest_time_diff = time_diff
+ closest_price = float(price.price)
+
+ price_map[symbol] = closest_price
+
+ block_token_price = BlockTokenPrice(
+ token_symbol=symbol, token_price=price_map[symbol], block_number=block.number
+ )
+ self._collect_domain(block_token_price)
+
+ def get_token_price(self, start_block_timestamp, end_block_timestamp):
+ session = self._service.Session()
+
+ sql = text(
+ """
+ select *
+ from token_prices
+ where symbol in :symbols
+ and timestamp between :start_block_timestamp and :end_block_timestamp
+ """
+ )
+
+ result = session.execute(
+ sql,
+ {
+ "symbols": tuple(self.symbols), # 将集合转成元组
+ "start_block_timestamp": start_block_timestamp,
+ "end_block_timestamp": end_block_timestamp,
+ },
+ )
+
+ result_fetchall = result.fetchall()
+
+ existing_symbols = {r.symbol for r in result_fetchall}
+ missing_symbols = self.symbols - existing_symbols
+
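+ # Symbols with no price row inside the window fall back to the most
+ # recent price recorded before the window's end.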
+ latest_symbol_sql = text(
+ """
+ SELECT *
+ FROM token_prices
+ WHERE symbol = :symbol AND timestamp < :end_block_timestamp
+ ORDER BY timestamp DESC LIMIT 1
+ """
+ )
+
+ for symbol in missing_symbols:
+ latest_symbol_result = session.execute(
+ latest_symbol_sql, {"symbols": symbol, "end_block_timestamp": end_block_timestamp}
+ )
+
+ latest_symbol_result_fetchall = latest_symbol_result.fetchall()
+ result_fetchall.extend(latest_symbol_result_fetchall)
+
+ return result_fetchall
diff --git a/hemera_udf/token_price/jobs/export_dex_block_token_price_job.py b/hemera_udf/token_price/jobs/export_dex_block_token_price_job.py
new file mode 100644
index 000000000..a547cb348
--- /dev/null
+++ b/hemera_udf/token_price/jobs/export_dex_block_token_price_job.py
@@ -0,0 +1,258 @@
+import logging
+
+import pandas as pd
+from sqlalchemy import and_, func, or_
+
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera_udf.meme_agent.domains.fourmeme import FourMemeTokenTradeD
+from hemera_udf.token_price.domains import DexBlockTokenPrice, DexBlockTokenPriceCurrent
+from hemera_udf.uniswap_v2 import UniswapV2SwapEvent
+from hemera_udf.uniswap_v3 import UniswapV3SwapEvent
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import UniswapV4SwapEvent
+
+logger = logging.getLogger(__name__)
+
+
+class ExportDexBlockTokenPriceJob(ExtensionJob):
+ dependency_types = [UniswapV2SwapEvent, UniswapV3SwapEvent, TokenBalance, UniswapV4SwapEvent, FourMemeTokenTradeD]
+
+ output_types = [DexBlockTokenPrice, DexBlockTokenPriceCurrent]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ config = kwargs["config"].get("export_block_token_price_job", {})
+ self.stable_tokens = config
+
+ self.max_price = 200000
+ self.max_market_cap = 1880666183880
+
+ self.balance_limit_map = {"WETH": 0.001, "ETH": 0.001, "WBNB": 0.01, "BNB": 0.01}
+
+ @staticmethod
+ def dataclass_to_df(dataclass):
+ dataclass_list = [dc.__dict__ for dc in dataclass]
+ df = pd.DataFrame(dataclass_list)
+ return df
+
+ @staticmethod
+ def process_swap_df(df):
+ if df.empty:
+ columns = ["block_number", "block_timestamp", "token_address", "token_price", "amount", "amount_usd"]
+ return pd.DataFrame(columns=columns)
+
+ token0_df = df[
+ ["block_number", "block_timestamp", "token0_address", "token0_price", "amount0", "amount_usd"]
+ ].rename(columns={"token0_address": "token_address", "token0_price": "token_price", "amount0": "amount"})
+ token1_df = df[
+ ["block_number", "block_timestamp", "token1_address", "token1_price", "amount1", "amount_usd"]
+ ].rename(columns={"token1_address": "token_address", "token1_price": "token_price", "amount1": "amount"})
+ return pd.concat([token0_df, token1_df], ignore_index=True)
+
+ @staticmethod
+ def process_fourmeme_df(df):
+ if df.empty:
+ columns = ["block_number", "block_timestamp", "token_address", "token_price", "amount", "amount_usd"]
+ return pd.DataFrame(columns=columns)
+
+ # Convert token to token_address for consistency with other sources
+ result_df = df[["block_number", "block_timestamp", "token", "price_usd", "amount"]].rename(
+ columns={"token": "token_address", "price_usd": "token_price"}
+ )
+
+ # Calculate amount_usd as amount * token_price, normalizing the raw
+ # amount with a fixed 18 decimals (FourMeme tokens are assumed to use 18)
+ result_df["amount_usd"] = result_df["amount"] * result_df["token_price"] / 10**18
+
+ return result_df
+
+ @staticmethod
+ def extract_current_status(records, current_status_domain, keys):
+ results = []
+ last_records = distinct_collections_by_group(collections=records, group_by=keys, max_key="block_number")
+ for last_record in last_records:
+ record = current_status_domain(**vars(last_record))
+ results.append(record)
+ return results
+
+ def process_token(self, df, token_prefix):
+ # build the column names for this token prefix
+ address_col = f"{token_prefix}_address"
+ price_col = f"{token_prefix}_price"
+ dict_col = f"{token_prefix}_address_dict"
+ decimals_col = f"{token_prefix}_decimals"
+ supply_col = f"{token_prefix}_total_supply"
+ # symbol_col = f'{token_prefix}_symbol'
+
+ market_cap_col = "market_cap"
+
+ # look up token metadata (decimals, total supply)
+ df[dict_col] = df[address_col].map(self.tokens)
+ df[decimals_col] = df[dict_col].map(lambda x: x.get("decimals"))
+ df[supply_col] = df[dict_col].map(lambda x: x.get("total_supply"))
+ # df[symbol_col] = df[dict_col].map(lambda x: x.get('symbol'))
+
+ # compute market cap and drop implausibly large values
+ df[market_cap_col] = df[price_col] * df[supply_col] / 10 ** df[decimals_col]
+ return df[df[market_cap_col] < self.max_market_cap]
+
+ def _process(self, **kwargs):
+ token_balance_dict = {
+ (tt.token_address, tt.address, tt.block_number): tt.balance
+ for tt in self._data_buff[TokenBalance.type()]
+ if tt.token_address in self.stable_tokens
+ }
+
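+ # Pool balances of configured stable tokens are used later to filter
+ # out thin pools whose quotes would be unreliable.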
+ uniswapv2_df_ = self.dataclass_to_df(self._data_buff[UniswapV2SwapEvent.type()])
+ if uniswapv2_df_.empty:
+ uniswapv2_df = uniswapv2_df_
+ else:
+ uniswapv2_df = self.process_uniswap_data(
+ uniswapv2_df_, token_balance_dict, self.stable_tokens, self.max_price, self.process_token, skip_balance_check=True
+ )
+ uniswapv3_df_ = self.dataclass_to_df(self._data_buff[UniswapV3SwapEvent.type()])
+ if uniswapv3_df_.empty:
+ uniswapv3_df = uniswapv3_df_
+ else:
+ uniswapv3_df = self.process_uniswap_data(
+ uniswapv3_df_, token_balance_dict, self.stable_tokens, self.max_price, self.process_token
+ )
+
+ # Process FourMeme trade data
+ fourmeme_df_ = self.dataclass_to_df(self._data_buff[FourMemeTokenTradeD.type()])
+ if fourmeme_df_.empty:
+ fourmeme_df = fourmeme_df_
+ else:
+ fourmeme_df = self.process_fourmeme_df(fourmeme_df_)
+
+ uniswapv4_df_ = self.dataclass_to_df(self._data_buff[UniswapV4SwapEvent.type()])
+ if uniswapv4_df_.empty:
+ uniswapv4_df = uniswapv4_df_
+ else:
+ uniswapv4_df = self.process_uniswap_data(
+ uniswapv4_df_,
+ token_balance_dict,
+ self.stable_tokens,
+ self.max_price,
+ self.process_token,
+ skip_balance_check=True,
+ )
+
+ processed_v2 = self.process_swap_df(uniswapv2_df)
+ processed_v3 = self.process_swap_df(uniswapv3_df)
+ processed_v4 = self.process_swap_df(uniswapv4_df)
+
+ # Combine all data sources
+ combined_df = pd.concat([processed_v2, processed_v3, processed_v4, fourmeme_df], ignore_index=True)
+
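+ # Aggregate per (token, block): the median damps outlier prices from
+ # individual swaps, while amounts and USD volume are summed.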
+ df_results = (
+ combined_df.groupby(["token_address", "block_number", "block_timestamp"])
+ .agg(
+ token_price=("token_price", "median"),
+ amount=("amount", lambda x: x.abs().sum()),
+ amount_usd=("amount_usd", "sum"),
+ )
+ .reset_index()
+ )
+
+ records = df_results.to_dict("records")
+
+ dex_block_token_price_list = []
+ for record in records:
+
+ # todo: improve
+ token_dict = self.tokens.get(record.get("token_address"), {})
+ token_symbol = token_dict.get("symbol")
+ if not token_symbol:
+ continue
+
+ decimals = token_dict.get("decimals")
+ record["amount"] = record.get("amount") / 10**decimals
+
+ dex_block_token_price = DexBlockTokenPrice(**record, token_symbol=token_symbol, decimals=decimals)
+
+ dex_block_token_price_list.append(dex_block_token_price)
+
+ self._collect_domains(dex_block_token_price_list)
+
+ current_results = self.extract_current_status(
+ dex_block_token_price_list, DexBlockTokenPriceCurrent, ["token_address"]
+ )
+ self._collect_domains(current_results)
+
+ def process_uniswap_data(
+ self, df, token_balance_dict, stable_tokens, max_price, process_token_fn, skip_balance_check=False
+ ):
+ df = df.dropna(subset=["token0_price"])
+ df = df[df["token0_price"] < max_price]
+ df = df[df["token1_price"] < max_price]
+
+ df = process_token_fn(df, "token0")
+ df = process_token_fn(df, "token1")
+
+ df["stable_token_address_position"] = df.apply(lambda x: 0 if x.token0_address in stable_tokens else 1, axis=1)
+
+ df["stable_token_symbol"] = df.apply(
+ lambda x: stable_tokens.get(x.token0_address) or stable_tokens.get(x.token1_address), axis=1
+ )
+
+ df["stable_token_balance_limit"] = df["stable_token_symbol"].map(self.balance_limit_map).fillna(10)
+
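+ # Pools without a configured per-symbol limit default to requiring 10
+ # (decimal-adjusted) units of the stable token.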
+ if not skip_balance_check:
+ df["token_balance_raw"] = df.apply(
+ lambda x: token_balance_dict.get((x.token0_address, x.pool_address, x.block_number))
+ or token_balance_dict.get((x.token1_address, x.pool_address, x.block_number)),
+ axis=1,
+ )
+
+ df = df.dropna(subset=["token_balance_raw"])
+
+ df["stable_balance"] = df.apply(
+ lambda x: x.token_balance_raw
+ / 10 ** (x.token1_decimals if x.stable_token_address_position else x.token0_decimals),
+ axis=1,
+ )
+
+ df = df[df["stable_balance"] > df["stable_token_balance_limit"]]
+
+ return df
+
+ def _get_current_holdings(self, tokens, block_number):
+ session = self._service.get_service_session()
+
+ conditions = [
+ and_(
+ DexBlockTokenPrice.token_address == hex_str_to_bytes(token),
+ ).self_group() # need to group for one combination
+ for token in tokens
+ ]
+
+ windowed_block_number = func.row_number().over(
+ partition_by=(DexBlockTokenPrice.token_address,),
+ order_by=DexBlockTokenPrice.block_number.desc(),
+ )
+
+ combined_conditions = or_(*conditions)
+
+ subquery = (
+ session.query(DexBlockTokenPrice, windowed_block_number.label("row_number"))
+ .filter(combined_conditions, DexBlockTokenPrice.block_number < block_number)
+ .subquery()
+ )
+
+ query = session.query(subquery).filter(subquery.c.row_number == 1)
+
+ results = query.all()
+
+ pre_prices_dict = {}
+ for record in results:
+ token_address = bytes_to_hex_str(record.token_address)
+ pre_prices_dict[token_address] = record.token_price
+ session.close()
+
+ return pre_prices_dict
diff --git a/hemera_udf/token_price/models.py b/hemera_udf/token_price/models.py
new file mode 100644
index 000000000..aeefee316
--- /dev/null
+++ b/hemera_udf/token_price/models.py
@@ -0,0 +1,69 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP, VARCHAR
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.token_price.domains import DexBlockTokenPrice, DexBlockTokenPriceCurrent
+
+
+class AfDexBlockTokenPrice(HemeraModel):
+ __tablename__ = "af_dex_block_token_price"
+
+ token_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT, primary_key=True)
+
+ token_symbol = Column(VARCHAR)
+ decimals = Column(BIGINT)
+
+ amount = Column(NUMERIC)
+ amount_usd = Column(NUMERIC)
+
+ token_price = Column(NUMERIC)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("token_address", "block_number", "block_timestamp"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": DexBlockTokenPrice,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
+
+
+class AfDexBlockTokenPriceCurrent(HemeraModel):
+ __tablename__ = "af_dex_block_token_price_current"
+
+ token_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT)
+
+ token_symbol = Column(VARCHAR)
+ decimals = Column(BIGINT)
+
+ amount = Column(NUMERIC)
+ amount_usd = Column(NUMERIC)
+
+ token_price = Column(NUMERIC)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("token_address"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": DexBlockTokenPriceCurrent,
+ "conflict_do_update": True,
+ "update_strategy": "EXCLUDED.block_number > af_dex_block_token_price_current.block_number",
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/total_supply/__init__.py b/hemera_udf/total_supply/__init__.py
new file mode 100644
index 000000000..4632ff849
--- /dev/null
+++ b/hemera_udf/total_supply/__init__.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
diff --git a/indexer/modules/custom/total_supply/domain/erc20_total_supply.py b/hemera_udf/total_supply/domains.py
similarity index 61%
rename from indexer/modules/custom/total_supply/domain/erc20_total_supply.py
rename to hemera_udf/total_supply/domains.py
index 4c5be8896..359c7724b 100644
--- a/indexer/modules/custom/total_supply/domain/erc20_total_supply.py
+++ b/hemera_udf/total_supply/domains.py
@@ -1,11 +1,10 @@
from dataclasses import dataclass
-from typing import Optional
-from indexer.domain import Domain, FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class Erc20TotalSupply(FilterData):
+class Erc20TotalSupply(Domain):
token_address: str
total_supply: int
block_number: int
@@ -13,7 +12,7 @@ class Erc20TotalSupply(FilterData):
@dataclass
-class Erc20CurrentTotalSupply(FilterData):
+class Erc20CurrentTotalSupply(Domain):
token_address: str
total_supply: int
block_number: int
diff --git a/hemera_udf/total_supply/jobs/__init__.py b/hemera_udf/total_supply/jobs/__init__.py
new file mode 100644
index 000000000..a4708498b
--- /dev/null
+++ b/hemera_udf/total_supply/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 17:03
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/modules/custom/total_supply/export_erc20_total_supply_job.py b/hemera_udf/total_supply/jobs/export_erc20_total_supply_job.py
similarity index 55%
rename from indexer/modules/custom/total_supply/export_erc20_total_supply_job.py
rename to hemera_udf/total_supply/jobs/export_erc20_total_supply_job.py
index 88d729e9c..c2a59a5ce 100644
--- a/indexer/modules/custom/total_supply/export_erc20_total_supply_job.py
+++ b/hemera_udf/total_supply/jobs/export_erc20_total_supply_job.py
@@ -1,12 +1,12 @@
import logging
-from indexer.domain.token_transfer import ERC20TokenTransfer
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.total_supply.domain.erc20_total_supply import Erc20CurrentTotalSupply, Erc20TotalSupply
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.abi_setting import TOKEN_TOTAL_SUPPLY_FUNCTION
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.abi_setting import TOKEN_TOTAL_SUPPLY_FUNCTION
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.total_supply.domains import Erc20CurrentTotalSupply, Erc20TotalSupply
logger = logging.getLogger(__name__)
@@ -34,7 +34,8 @@ def get_filter(self):
)
def _process(self, **kwargs):
- token_transfers = self._data_buff[ERC20TokenTransfer.type()]
+ erc20_token_transfers = self._data_buff[ERC20TokenTransfer.type()]
+ token_transfers = [tt for tt in erc20_token_transfers if tt.token_address in self.token_address_list]
call_dict = {}
for token_transfer in token_transfers:
token_address = token_transfer.token_address
@@ -57,17 +58,18 @@ def _process(self, **kwargs):
call_list.sort(key=lambda call: call.block_number)
for call in call_list:
- total_supply = call.returns.get("totalSupply")
-
- token_address = call.target.lower()
- erc_total_supply = Erc20TotalSupply(
- token_address=token_address,
- total_supply=total_supply,
- block_number=call.block_number,
- block_timestamp=call.user_defined_k,
- )
-
- current_dict[token_address] = Erc20CurrentTotalSupply(**vars(erc_total_supply))
- records.append(erc_total_supply)
+ if call.returns:
+ total_supply = call.returns.get("totalSupply")
+
+ token_address = call.target.lower()
+ erc_total_supply = Erc20TotalSupply(
+ token_address=token_address,
+ total_supply=total_supply,
+ block_number=call.block_number,
+ block_timestamp=call.user_defined_k,
+ )
+
+ current_dict[token_address] = Erc20CurrentTotalSupply(**vars(erc_total_supply))
+ records.append(erc_total_supply)
self._collect_domains(records)
self._collect_domains(current_dict.values())
diff --git a/indexer/modules/custom/uniswap_v3/domains/__init__.py b/hemera_udf/total_supply/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v3/domains/__init__.py
rename to hemera_udf/total_supply/models/__init__.py
diff --git a/indexer/modules/custom/total_supply/models/af_erc20_total_supply_current.py b/hemera_udf/total_supply/models/af_erc20_total_supply_current.py
similarity index 69%
rename from indexer/modules/custom/total_supply/models/af_erc20_total_supply_current.py
rename to hemera_udf/total_supply/models/af_erc20_total_supply_current.py
index 6584f7bbd..ad1b515c2 100644
--- a/indexer/modules/custom/total_supply/models/af_erc20_total_supply_current.py
+++ b/hemera_udf/total_supply/models/af_erc20_total_supply_current.py
@@ -1,11 +1,9 @@
-from datetime import datetime
+from sqlalchemy import Column, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
-from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.total_supply.domain.erc20_total_supply import Erc20CurrentTotalSupply
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import UniswapV2Erc20CurrentTotalSupply
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.total_supply.domains import Erc20CurrentTotalSupply
+from hemera_udf.uniswap_v2.domains import UniswapV2Erc20CurrentTotalSupply
class AfErc20TotalSupplyCurrent(HemeraModel):
diff --git a/indexer/modules/custom/total_supply/models/af_erc20_total_supply_hist.py b/hemera_udf/total_supply/models/af_erc20_total_supply_hist.py
similarity index 76%
rename from indexer/modules/custom/total_supply/models/af_erc20_total_supply_hist.py
rename to hemera_udf/total_supply/models/af_erc20_total_supply_hist.py
index 28e558a32..21815b70a 100644
--- a/indexer/modules/custom/total_supply/models/af_erc20_total_supply_hist.py
+++ b/hemera_udf/total_supply/models/af_erc20_total_supply_hist.py
@@ -1,11 +1,9 @@
-from datetime import datetime
-
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
+from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.total_supply.domain.erc20_total_supply import Erc20TotalSupply
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import UniswapV2Erc20TotalSupply
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.total_supply.domains import Erc20TotalSupply
+from hemera_udf.uniswap_v2.domains import UniswapV2Erc20TotalSupply
class AfErc20TotalSupplyHist(HemeraModel):
diff --git a/hemera_udf/uniswap_v2/__init__.py b/hemera_udf/uniswap_v2/__init__.py
new file mode 100644
index 000000000..b44bb82e9
--- /dev/null
+++ b/hemera_udf/uniswap_v2/__init__.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.uniswap_v2.domains import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+")
+
+value = DynamicEntityTypeRegistry.register("UNISWAP_V2")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ UniswapV2Pool,
+ UniswapV2SwapEvent,
+ UniswapV2PoolFromSwapEvent,
+ UniswapV2Erc20TotalSupply,
+ UniswapV2Erc20CurrentTotalSupply,
+ },
+)
diff --git a/indexer/modules/custom/uniswap_v3/models/__init__.py b/hemera_udf/uniswap_v2/abi/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v3/models/__init__.py
rename to hemera_udf/uniswap_v2/abi/__init__.py
diff --git a/indexer/modules/custom/uniswap_v2/aerodrome_abi.py b/hemera_udf/uniswap_v2/abi/aerodromev2_abi.py
similarity index 63%
rename from indexer/modules/custom/uniswap_v2/aerodrome_abi.py
rename to hemera_udf/uniswap_v2/abi/aerodromev2_abi.py
index e307c62fe..9fce80069 100644
--- a/indexer/modules/custom/uniswap_v2/aerodrome_abi.py
+++ b/hemera_udf/uniswap_v2/abi/aerodromev2_abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event
+from hemera.common.utils.abi_code_utils import Event, Function
POOL_CREATED_EVENT = Event(
{
@@ -30,3 +30,32 @@
"type": "event",
}
)
+
+TOKEN0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "token0",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TOKEN1_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "token1",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+FACTORY_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "factory",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
diff --git a/indexer/modules/custom/uniswap_v2/uniswapv2_abi.py b/hemera_udf/uniswap_v2/abi/uniswapv2_abi.py
similarity index 57%
rename from indexer/modules/custom/uniswap_v2/uniswapv2_abi.py
rename to hemera_udf/uniswap_v2/abi/uniswapv2_abi.py
index 5f88fdf1a..15e7a12e1 100644
--- a/indexer/modules/custom/uniswap_v2/uniswapv2_abi.py
+++ b/hemera_udf/uniswap_v2/abi/uniswapv2_abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event
+from hemera.common.utils.abi_code_utils import Event, Function
PAIR_CREATED_EVENT = Event(
{
@@ -29,3 +29,37 @@
"type": "event",
}
)
+
+TOKEN0_FUNCTION = Function(
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "token0",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "payable": False,
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TOKEN1_FUNCTION = Function(
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "token1",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "payable": False,
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+FACTORY_FUNCTION = Function(
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "factory",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "payable": False,
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
diff --git a/indexer/modules/custom/uniswap_v2/domain/feature_uniswap_v2.py b/hemera_udf/uniswap_v2/domains.py
similarity index 58%
rename from indexer/modules/custom/uniswap_v2/domain/feature_uniswap_v2.py
rename to hemera_udf/uniswap_v2/domains.py
index 24358e715..ee1176de4 100644
--- a/indexer/modules/custom/uniswap_v2/domain/feature_uniswap_v2.py
+++ b/hemera_udf/uniswap_v2/domains.py
@@ -1,10 +1,10 @@
from dataclasses import dataclass
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class UniswapV2Pool(FilterData):
+class UniswapV2Pool(Domain):
factory_address: str
pool_address: str
token0_address: str
@@ -15,22 +15,33 @@ class UniswapV2Pool(FilterData):
@dataclass
-class UniswapV2SwapEvent(FilterData):
+class UniswapV2SwapEvent(Domain):
sender: str
amount0_in: int
amount1_in: int
amount0_out: int
amount1_out: int
- log_index: int
+
to_address: str
pool_address: str
+ token0_address: str
+ token1_address: str
+ # calculate
+ amount0: int
+ amount1: int
+ token0_price: float
+ token1_price: float
+ amount_usd: float
+
block_number: int
block_timestamp: int
transaction_hash: str
+ log_index: int
+ transaction_from_address: str
@dataclass
-class UniswapV2Erc20TotalSupply(FilterData):
+class UniswapV2Erc20TotalSupply(Domain):
token_address: str
total_supply: int
block_number: int
@@ -38,8 +49,13 @@ class UniswapV2Erc20TotalSupply(FilterData):
@dataclass
-class UniswapV2Erc20CurrentTotalSupply(FilterData):
+class UniswapV2Erc20CurrentTotalSupply(Domain):
token_address: str
total_supply: int
block_number: int
block_timestamp: int
+
+
+@dataclass
+class UniswapV2PoolFromSwapEvent(UniswapV2Pool):
+ pass
diff --git a/hemera_udf/uniswap_v2/jobs/__init__.py b/hemera_udf/uniswap_v2/jobs/__init__.py
new file mode 100644
index 000000000..c3e1a2eaf
--- /dev/null
+++ b/hemera_udf/uniswap_v2/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 17:09
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/modules/custom/uniswap_v2/uniswapv2_pools.py b/hemera_udf/uniswap_v2/jobs/uniswapv2_pools_job.py
similarity index 74%
rename from indexer/modules/custom/uniswap_v2/uniswapv2_pools.py
rename to hemera_udf/uniswap_v2/jobs/uniswapv2_pools_job.py
index 3db6604f0..d03eab42e 100644
--- a/indexer/modules/custom/uniswap_v2/uniswapv2_pools.py
+++ b/hemera_udf/uniswap_v2/jobs/uniswapv2_pools_job.py
@@ -1,11 +1,11 @@
import logging
-from indexer.domain.log import Log
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.uniswap_v2.aerodrome_abi import POOL_CREATED_EVENT
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import UniswapV2Pool
-from indexer.modules.custom.uniswap_v2.uniswapv2_abi import PAIR_CREATED_EVENT
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.domains.log import Log
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.uniswap_v2.abi.aerodromev2_abi import POOL_CREATED_EVENT
+from hemera_udf.uniswap_v2.abi.uniswapv2_abi import PAIR_CREATED_EVENT
+from hemera_udf.uniswap_v2.domains import UniswapV2Pool
logger = logging.getLogger(__name__)
@@ -26,11 +26,17 @@ def get_filter(self):
def _process(self, **kwargs):
logs = self._data_buff[Log.type()]
+
+ pools_dict = {}
+
for log in logs:
pool = None
if log.topic0 == PAIR_CREATED_EVENT.get_signature():
decoded_dict = PAIR_CREATED_EVENT.decode_log(log)
+ if not decoded_dict:
+ continue
+
pool = UniswapV2Pool(
factory_address=log.address,
pool_address=decoded_dict["pair"],
@@ -53,4 +59,6 @@ def _process(self, **kwargs):
)
if pool:
- self._collect_domain(pool)
+ pools_dict[pool.pool_address] = pool
+
+ self._collect_domains(pools_dict.values())
diff --git a/hemera_udf/uniswap_v2/jobs/uniswapv2_swap_event_job.py b/hemera_udf/uniswap_v2/jobs/uniswapv2_swap_event_job.py
new file mode 100644
index 000000000..05415b1b3
--- /dev/null
+++ b/hemera_udf/uniswap_v2/jobs/uniswapv2_swap_event_job.py
@@ -0,0 +1,210 @@
+import logging
+
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.token_price.domains import BlockTokenPrice
+from hemera_udf.uniswap_v2.abi import aerodromev2_abi, uniswapv2_abi
+from hemera_udf.uniswap_v2.abi.aerodromev2_abi import SWAP_EVENT as AERODROME_SWAP_EVENT
+from hemera_udf.uniswap_v2.abi.uniswapv2_abi import SWAP_EVENT as UNISWAPV2_SWAP_EVENT
+from hemera_udf.uniswap_v2.domains import UniswapV2PoolFromSwapEvent, UniswapV2SwapEvent
+from hemera_udf.uniswap_v2.models.feature_uniswap_v2_pools import UniswapV2Pools
+
+logger = logging.getLogger(__name__)
+
+
+class ExportUniSwapV2SwapEventJob(FilterTransactionDataJob):
+ dependency_types = [Transaction, BlockTokenPrice]
+ output_types = [UniswapV2SwapEvent, UniswapV2PoolFromSwapEvent]
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+
+ stable_tokens_config = kwargs["config"].get("export_block_token_price_job", {})
+
+ self.stable_tokens = stable_tokens_config
+ self.pools_requested_by_rpc = set()
+ self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
+ self._existing_pools = self.get_existing_pools()
+
+ def get_filter(self):
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(topics=[UNISWAPV2_SWAP_EVENT.get_signature(), AERODROME_SWAP_EVENT.get_signature()]),
+ ]
+ )
+
+ def change_block_token_prices_to_dict(self):
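+ # Invert the configured {address: symbol} mapping and key prices by
+ # (token_address, block_number) for O(1) lookups during decoding.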
+ symbol_address_dict = {symbol: address for address, symbol in self.stable_tokens.items()}
+
+ token_prices_dict = {}
+
+ block_token_prices = self._data_buff[BlockTokenPrice.type()]
+ for token_price in block_token_prices:
+ address = symbol_address_dict.get(token_price.token_symbol)
+ if address:
+ block_number = token_price.block_number
+ token_prices_dict[address, block_number] = token_price.token_price
+
+ return token_prices_dict
+
+ def get_missing_pools_by_rpc(self):
+ # Scan receipt logs for swap events emitted by pools we have not indexed yet.
+ missing_pool_address_dict = {}
+
+ transactions = self._data_buff["transaction"]
+
+ for transaction in transactions:
+ logs = transaction.receipt.logs
+ for log in logs:
+ abi_module = None
+ if log.topic0 == UNISWAPV2_SWAP_EVENT.get_signature() and log.address not in self._existing_pools:
+ if log.address not in self.pools_requested_by_rpc:
+ abi_module = uniswapv2_abi
+ self.pools_requested_by_rpc.add(log.address)
+ elif log.topic0 == AERODROME_SWAP_EVENT.get_signature() and log.address not in self._existing_pools:
+ if log.address not in self.pools_requested_by_rpc:
+ abi_module = aerodromev2_abi
+ self.pools_requested_by_rpc.add(log.address)
+
+ if abi_module:
+ call_dict = {
+ "abi_module": abi_module,
+ "target": log.address,
+ "block_number": log.block_number,
+ "user_defined_k": log.block_timestamp,
+ }
+ missing_pool_address_dict[log.address] = call_dict
+
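+ # Probe each unknown pool once via three batched multicalls
+ # (factory, token0, token1); pools where any call fails are skipped.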
+ factory_list = []
+ token0_list = []
+ token1_list = []
+
+ for call_dict in missing_pool_address_dict.values():
+ abi_module = call_dict.pop("abi_module")
+ factory_list.append(Call(function_abi=abi_module.FACTORY_FUNCTION, **call_dict))
+ token0_list.append(Call(function_abi=abi_module.TOKEN0_FUNCTION, **call_dict))
+ token1_list.append(Call(function_abi=abi_module.TOKEN1_FUNCTION, **call_dict))
+
+ self.multi_call_helper.execute_calls(factory_list)
+ self.multi_call_helper.execute_calls(token0_list)
+ self.multi_call_helper.execute_calls(token1_list)
+
+ for factory_call, token0_call, token1_call in zip(factory_list, token0_list, token1_list):
+ factory_address = factory_call.returns.get("") if factory_call.returns else None
+ token0 = token0_call.returns.get("") if token0_call.returns else None
+ token1 = token1_call.returns.get("") if token1_call.returns else None
+ if factory_address and token0 and token1:
+ pool_address = factory_call.target.lower()
+ uniswap_v2_pool_from_swap_event = UniswapV2PoolFromSwapEvent(
+ factory_address=factory_address,
+ pool_address=pool_address,
+ token0_address=token0,
+ token1_address=token1,
+ block_number=factory_call.block_number,
+ block_timestamp=factory_call.user_defined_k,
+ length=-1,
+ )
+
+ self._existing_pools[pool_address] = token0, token1
+ self._collect_domain(uniswap_v2_pool_from_swap_event)
+
+ def _process(self, **kwargs):
+ self.get_missing_pools_by_rpc()
+
+ token_prices_dict = self.change_block_token_prices_to_dict()
+
+ transactions = self._data_buff[Transaction.type()]
+ for transaction in transactions:
+ logs = transaction.receipt.logs
+ for log in logs:
+ decoded_dict = None
+ block_number = log.block_number
+ if log.topic0 == UNISWAPV2_SWAP_EVENT.get_signature() and log.address in self._existing_pools:
+ decoded_dict = UNISWAPV2_SWAP_EVENT.decode_log(log)
+ elif log.topic0 == AERODROME_SWAP_EVENT.get_signature() and log.address in self._existing_pools:
+ decoded_dict = AERODROME_SWAP_EVENT.decode_log(log)
+
+ if decoded_dict:
+ amount0_in = decoded_dict["amount0In"]
+ amount1_in = decoded_dict["amount1In"]
+ amount0_out = decoded_dict["amount0Out"]
+ amount1_out = decoded_dict["amount1Out"]
+
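+ # Net flow per token: positive means the token entered the pool
+ # (the trader sold it), negative means it left the pool.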
+ amount0 = amount0_in - amount0_out
+ amount1 = amount1_in - amount1_out
+
+ amount0_abs = abs(amount0)
+ amount1_abs = abs(amount1)
+
+ token0_address, token1_address = self._existing_pools[log.address]
+
+ tokens0 = self.tokens.get(token0_address)
+ tokens1 = self.tokens.get(token1_address)
+
+ decimals0 = tokens0.get("decimals") if tokens0 else None
+ decimals1 = tokens1.get("decimals") if tokens1 else None
+
+ decimals_conditions = decimals0 and decimals1
+
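+ # Price the trade through whichever leg is a configured stable token;
+ # this assumes a BlockTokenPrice row exists for that stable token at
+ # this block, otherwise the lookup yields None.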
+ if token0_address in self.stable_tokens and decimals_conditions:
+ token0_price = token_prices_dict.get((token0_address, block_number))
+ amount_usd = amount0_abs / 10**decimals0 * token0_price
+ token1_price = amount_usd / (amount1_abs / 10**decimals1) if amount1_abs > 0 else None
+
+ elif token1_address in self.stable_tokens and decimals_conditions:
+ token1_price = token_prices_dict.get((token1_address, block_number))
+ amount_usd = amount1_abs / 10**decimals1 * token1_price
+ token0_price = amount_usd / (amount0_abs / 10**decimals0) if amount0_abs > 0 else None
+ else:
+ token0_price = None
+ token1_price = None
+ amount_usd = None
+
+ swap_event = UniswapV2SwapEvent(
+ pool_address=log.address,
+ sender=decoded_dict["sender"],
+ to_address=decoded_dict["to"],
+ amount0_in=amount0_in,
+ amount1_in=amount1_in,
+ amount0_out=amount0_out,
+ amount1_out=amount1_out,
+ block_number=block_number,
+ block_timestamp=log.block_timestamp,
+ transaction_hash=log.transaction_hash,
+ transaction_from_address=transaction.from_address,
+ log_index=log.log_index,
+ token0_price=token0_price,
+ token1_price=token1_price,
+ amount_usd=amount_usd,
+ amount0=amount0,
+ amount1=amount1,
+ token0_address=token0_address,
+ token1_address=token1_address,
+ )
+
+ self._collect_domain(swap_event)
+
+ def get_existing_pools(self):
+ session = self._service.Session()
+ try:
+ existing_pools = {}
+
+ pools_orm = session.query(UniswapV2Pools).all()
+ for pool in pools_orm:
+ existing_pools[bytes_to_hex_str(pool.pool_address)] = bytes_to_hex_str(
+ pool.token0_address
+ ), bytes_to_hex_str(pool.token1_address)
+
+ except Exception as e:
+ logger.error(e)
+ raise
+ finally:
+ session.close()
+
+ return existing_pools
diff --git a/hemera_udf/uniswap_v2/jobs/uniswapv2_total_supply_job.py b/hemera_udf/uniswap_v2/jobs/uniswapv2_total_supply_job.py
new file mode 100644
index 000000000..115844ec9
--- /dev/null
+++ b/hemera_udf/uniswap_v2/jobs/uniswapv2_total_supply_job.py
@@ -0,0 +1,90 @@
+import logging
+
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer
+from hemera.indexer.jobs.base_job import ExtensionJob
+from hemera.indexer.utils.abi_setting import TOKEN_TOTAL_SUPPLY_FUNCTION
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.uniswap_v2.domains import UniswapV2Erc20CurrentTotalSupply, UniswapV2Erc20TotalSupply, UniswapV2Pool
+from hemera_udf.uniswap_v2.models.feature_uniswap_v2_pools import UniswapV2Pools
+
+logger = logging.getLogger(__name__)
+
+
+class ExportUniswapV2TotalSupplyJob(ExtensionJob):
+ dependency_types = [ERC20TokenTransfer, UniswapV2Pool]
+
+ output_types = [UniswapV2Erc20TotalSupply, UniswapV2Erc20CurrentTotalSupply]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
+ self._existing_pools = self.get_existing_pools()
+
+ def _process(self, **kwargs):
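+ # Refresh the LP token total supply whenever pool (LP) tokens move:
+ # every ERC20 transfer of a known pool address triggers a totalSupply
+ # multicall at that block.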
+ uniswap_v2_pools = self._data_buff[UniswapV2Pool.type()]
+ for pool in uniswap_v2_pools:
+ self._existing_pools.add(pool.pool_address)
+
+ erc_20_token_transfers = self._data_buff[ERC20TokenTransfer.type()]
+ uniswapv2_pool_token_transfers = [
+ tt for tt in erc_20_token_transfers if tt.token_address in self._existing_pools
+ ]
+
+ call_dict = {}
+ for token_transfer in uniswapv2_pool_token_transfers:
+ token_address = token_transfer.token_address
+ block_number = token_transfer.block_number
+ call = Call(
+ target=token_address,
+ function_abi=TOKEN_TOTAL_SUPPLY_FUNCTION,
+ block_number=block_number,
+ user_defined_k=token_transfer.block_timestamp,
+ )
+ call_dict[token_address, block_number] = call
+
+ call_list = list(call_dict.values())
+
+ self.multi_call_helper.execute_calls(call_list)
+
+ records = []
+ current_dict = {}
+
+ call_list.sort(key=lambda call: call.block_number)
+
+ for call in call_list:
+ returns = call.returns
+ if returns:
+ total_supply = returns.get("totalSupply")
+
+ token_address = call.target.lower()
+ erc_total_supply = UniswapV2Erc20TotalSupply(
+ token_address=token_address,
+ total_supply=total_supply,
+ block_number=call.block_number,
+ block_timestamp=call.user_defined_k,
+ )
+
+ current_dict[token_address] = UniswapV2Erc20CurrentTotalSupply(**vars(erc_total_supply))
+ records.append(erc_total_supply)
+ self._collect_domains(records)
+ self._collect_domains(current_dict.values())
+
+ def get_existing_pools(self):
+ session = self._service.Session()
+ try:
+ existing_pools = set()
+
+ pools_orm = session.query(UniswapV2Pools).all()
+ for pool in pools_orm:
+ existing_pools.add(bytes_to_hex_str(pool.pool_address))
+
+ except Exception as e:
+ logger.error(e)
+ raise
+ finally:
+ session.close()
+
+ return existing_pools
diff --git a/indexer/modules/custom/uniswap_v3/tests/__init__.py b/hemera_udf/uniswap_v2/models/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v3/tests/__init__.py
rename to hemera_udf/uniswap_v2/models/__init__.py
diff --git a/indexer/modules/custom/uniswap_v2/models/af_uniswap_v2_swap_event.py b/hemera_udf/uniswap_v2/models/af_uniswap_v2_swap_event.py
similarity index 68%
rename from indexer/modules/custom/uniswap_v2/models/af_uniswap_v2_swap_event.py
rename to hemera_udf/uniswap_v2/models/af_uniswap_v2_swap_event.py
index 3782870b1..2cfe0981a 100644
--- a/indexer/modules/custom/uniswap_v2/models/af_uniswap_v2_swap_event.py
+++ b/hemera_udf/uniswap_v2/models/af_uniswap_v2_swap_event.py
@@ -1,14 +1,15 @@
from sqlalchemy import INTEGER, Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import UniswapV2SwapEvent
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v2.domains import UniswapV2SwapEvent
class AfUniswapV2SwapEvent(HemeraModel):
__tablename__ = "af_uniswap_v2_swap_event"
transaction_hash = Column(BYTEA, primary_key=True)
log_index = Column(INTEGER, primary_key=True)
+ transaction_from_address = Column(BYTEA)
pool_address = Column(BYTEA)
sender = Column(BYTEA)
@@ -19,13 +20,22 @@ class AfUniswapV2SwapEvent(HemeraModel):
amount0_out = Column(NUMERIC)
amount1_out = Column(NUMERIC)
+ amount0 = Column(NUMERIC)
+ amount1 = Column(NUMERIC)
+ token0_price = Column(NUMERIC)
+ token1_price = Column(NUMERIC)
+ amount_usd = Column(NUMERIC)
+
block_number = Column(BIGINT)
- block_timestamp = Column(TIMESTAMP)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
+
+ token0_address = Column(BYTEA)
+ token1_address = Column(BYTEA)
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
- __table_args__ = (PrimaryKeyConstraint("transaction_hash", "log_index"),)
+ __table_args__ = (PrimaryKeyConstraint("transaction_hash", "log_index", "block_timestamp"),)
@staticmethod
def model_domain_mapping():
diff --git a/indexer/modules/custom/uniswap_v2/models/feature_uniswap_v2_pools.py b/hemera_udf/uniswap_v2/models/feature_uniswap_v2_pools.py
similarity index 72%
rename from indexer/modules/custom/uniswap_v2/models/feature_uniswap_v2_pools.py
rename to hemera_udf/uniswap_v2/models/feature_uniswap_v2_pools.py
index 7e6ba5d6c..5096fe7b2 100644
--- a/indexer/modules/custom/uniswap_v2/models/feature_uniswap_v2_pools.py
+++ b/hemera_udf/uniswap_v2/models/feature_uniswap_v2_pools.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import UniswapV2Pool
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v2.domains import UniswapV2Pool, UniswapV2PoolFromSwapEvent
class UniswapV2Pools(HemeraModel):
@@ -31,5 +31,11 @@ def model_domain_mapping():
"conflict_do_update": True,
"update_strategy": None,
"converter": general_converter,
- }
+ },
+ {
+ "domain": UniswapV2PoolFromSwapEvent,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
]
diff --git a/hemera_udf/uniswap_v3/CHANGELOG.md b/hemera_udf/uniswap_v3/CHANGELOG.md
new file mode 100644
index 000000000..0736013dc
--- /dev/null
+++ b/hemera_udf/uniswap_v3/CHANGELOG.md
@@ -0,0 +1,16 @@
+### Changelog - Version 1.0.0
+
+#### Improvements
+
+- **Optimized Uniswap V3 Logic**
+ Improved processing efficiency and data parsing accuracy, ensuring more reliable indexing and query results.
+
+- **Renamed `factory_address` and Updated Configurations**
+  Renamed `factory_address` and applied the matching configuration updates across all supported chains, keeping multi-chain deployments consistent.
+
+- **Added Support for Aerodrome Type**
+ Introduced support for the new Aerodrome type, enabling data parsing for additional DeFi platforms.
+
+- **Added New Job Modules**
+  - Introduced the `ExportBlockTokenPriceJob` and `ExportDexBlockTokenPriceJob` jobs to export token prices at block granularity.
+  - Added DEX price fields to the Uniswap-related tables, making price data directly available for analysis.
\ No newline at end of file
diff --git a/hemera_udf/uniswap_v3/__init__.py b/hemera_udf/uniswap_v3/__init__.py
new file mode 100644
index 000000000..389e359e6
--- /dev/null
+++ b/hemera_udf/uniswap_v3/__init__.py
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+    raise RuntimeError(f"The package `{__name__}:{__version__}` needs Hemera 1.0.0+")
+
+
+value = DynamicEntityTypeRegistry.register("UNISWAP_V3")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ UniswapV3Pool,
+ UniswapV3PoolPrice,
+ UniswapV3PoolCurrentPrice,
+ UniswapV3SwapEvent,
+ UniswapV3PoolFromSwapEvent,
+ UniswapV3Token,
+ UniswapV3TokenDetail,
+ UniswapV3TokenCurrentStatus,
+ UniswapV3PoolFromToken,
+ },
+)
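
The guard above parses `base_version` before comparing, so pre-releases of 1.0.0 (for example `1.0.0rc1`) still pass the check. A standalone illustration (the `compatible` helper is ours, for demonstration only):

```python
# Why the double parse matters: base_version strips pre-release
# tags, so "1.0.0rc1" is compared as "1.0.0" and passes a >= 1.0.0
# check that a direct comparison would fail.
import packaging.version


def compatible(version: str, minimum: str = "1.0.0") -> bool:
    base = packaging.version.parse(version).base_version  # "1.0.0rc1" -> "1.0.0"
    return packaging.version.parse(base) >= packaging.version.parse(minimum)


assert compatible("1.2.0")
assert compatible("1.0.0rc1")  # a direct parse would sort below 1.0.0
assert not compatible("0.9.9")
```
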
diff --git a/hemera_udf/uniswap_v3/abi/__init__.py b/hemera_udf/uniswap_v3/abi/__init__.py
new file mode 100644
index 000000000..2003e3420
--- /dev/null
+++ b/hemera_udf/uniswap_v3/abi/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 17:15
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/hemera_udf/uniswap_v3/abi/aerodrome_abi.py b/hemera_udf/uniswap_v3/abi/aerodrome_abi.py
new file mode 100644
index 000000000..b67910691
--- /dev/null
+++ b/hemera_udf/uniswap_v3/abi/aerodrome_abi.py
@@ -0,0 +1,235 @@
+from hemera.common.utils.abi_code_utils import Event, Function
+
+POSITIONS_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "uint256", "name": "tokenId", "type": "uint256"}],
+ "name": "positions",
+ "outputs": [
+ {"internalType": "uint96", "name": "nonce", "type": "uint96"},
+ {"internalType": "address", "name": "operator", "type": "address"},
+ {"internalType": "address", "name": "token0", "type": "address"},
+ {"internalType": "address", "name": "token1", "type": "address"},
+ {"internalType": "int24", "name": "tickSpacing", "type": "int24"},
+ {"internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ {"internalType": "uint256", "name": "feeGrowthInside0LastX128", "type": "uint256"},
+ {"internalType": "uint256", "name": "feeGrowthInside1LastX128", "type": "uint256"},
+ {"internalType": "uint128", "name": "tokensOwed0", "type": "uint128"},
+ {"internalType": "uint128", "name": "tokensOwed1", "type": "uint128"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_POOL_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "", "type": "address"},
+ {"internalType": "address", "name": "", "type": "address"},
+ {"internalType": "uint24", "name": "", "type": "uint24"},
+ ],
+ "name": "getPool",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+SLOT0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "slot0",
+ "outputs": [
+ {"internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
+ {"internalType": "int24", "name": "tick", "type": "int24"},
+ {"internalType": "uint16", "name": "observationIndex", "type": "uint16"},
+ {"internalType": "uint16", "name": "observationCardinality", "type": "uint16"},
+ {"internalType": "uint16", "name": "observationCardinalityNext", "type": "uint16"},
+ {"internalType": "uint8", "name": "feeProtocol", "type": "uint8"},
+ {"internalType": "bool", "name": "unlocked", "type": "bool"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+POOL_CREATED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "token0", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "token1", "type": "address"},
+ {"indexed": True, "internalType": "int24", "name": "tickSpacing", "type": "int24"},
+ {"indexed": False, "internalType": "address", "name": "pool", "type": "address"},
+ ],
+ "name": "PoolCreated",
+ "type": "event",
+ }
+)
+SWAP_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "recipient", "type": "address"},
+ {"indexed": False, "internalType": "int256", "name": "amount0", "type": "int256"},
+ {"indexed": False, "internalType": "int256", "name": "amount1", "type": "int256"},
+ {"indexed": False, "internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
+ {"indexed": False, "internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ {"indexed": False, "internalType": "int24", "name": "tick", "type": "int24"},
+ ],
+ "name": "Swap",
+ "type": "event",
+ }
+)
+OWNER_OF_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "uint256", "name": "tokenId", "type": "uint256"}],
+ "name": "ownerOf",
+ "outputs": [{"internalType": "address", "name": "owner", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+FACTORY_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "factory",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+FEE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "fee",
+ "outputs": [{"internalType": "uint24", "name": "", "type": "uint24"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TOKEN0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "token0",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TOKEN1_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "token1",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TICK_SPACING_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "tickSpacing",
+ "outputs": [{"internalType": "int24", "name": "", "type": "int24"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+INCREASE_LIQUIDITY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "name": "tokenId", "type": "uint256"},
+ {"indexed": False, "name": "liquidity", "type": "uint128"},
+ {"indexed": False, "name": "amount0", "type": "uint256"},
+ {"indexed": False, "name": "amount1", "type": "uint256"},
+ ],
+ "name": "IncreaseLiquidity",
+ "type": "event",
+ }
+)
+BURN_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "owner", "type": "address"},
+ {"indexed": True, "internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"indexed": True, "internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"indexed": False, "internalType": "uint128", "name": "amount", "type": "uint128"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "Burn",
+ "type": "event",
+ }
+)
+UPDATE_LIQUIDITY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "owner", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "arg2", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "tokenId", "type": "uint256"},
+ {"indexed": False, "internalType": "int128", "name": "arg4", "type": "int128"},
+ {"indexed": False, "internalType": "int24", "name": "arg5", "type": "int24"},
+ {"indexed": False, "internalType": "int24", "name": "arg6", "type": "int24"},
+ ],
+ "name": "UpdateLiquidity",
+ "type": "event",
+ }
+)
+DECREASE_LIQUIDITY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "name": "tokenId", "type": "uint256"},
+ {"indexed": False, "name": "liquidity", "type": "uint128"},
+ {"indexed": False, "name": "amount0", "type": "uint256"},
+ {"indexed": False, "name": "amount1", "type": "uint256"},
+ ],
+ "name": "DecreaseLiquidity",
+ "type": "event",
+ }
+)
+MINT_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "owner", "type": "address"},
+ {"indexed": True, "internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"indexed": True, "internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"indexed": False, "internalType": "uint128", "name": "amount", "type": "uint128"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "Mint",
+ "type": "event",
+ }
+)
+
+DEPOSIT_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": True, "internalType": "uint256", "name": "tokenId", "type": "uint256"},
+ {"indexed": True, "internalType": "uint128", "name": "liquidityToStake", "type": "uint128"},
+ ],
+ "name": "Deposit",
+ "type": "event",
+ }
+)
+
+WITHDRAW_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "user", "type": "address"},
+ {"indexed": True, "internalType": "uint256", "name": "tokenId", "type": "uint256"},
+ {"indexed": True, "internalType": "uint128", "name": "liquidityToStake", "type": "uint128"},
+ ],
+ "name": "Withdraw",
+ "type": "event",
+ }
+)
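
These `Event` wrappers are consumed the same way as the other ABI modules in this changeset: match `log.topic0` against `get_signature()`, then `decode_log(log)`. A minimal sketch (the `log` shape follows `hemera.indexer.domains.log.Log` as used by the jobs later in this diff; the sign-convention comment follows the usual V3 semantics and is an assumption for Aerodrome):

```python
# Sketch: decode an Aerodrome Swap log using the wrappers above.
from hemera_udf.uniswap_v3.abi import aerodrome_abi


def try_decode_swap(log):
    if log.topic0 != aerodrome_abi.SWAP_EVENT.get_signature():
        return None
    decoded = aerodrome_abi.SWAP_EVENT.decode_log(log)
    return {
        "pool_address": log.address,
        "sender": decoded["sender"],
        "recipient": decoded["recipient"],
        "amount0": decoded["amount0"],  # int256; negative = token0 left the pool (V3 convention, assumed)
        "amount1": decoded["amount1"],
        "sqrt_price_x96": decoded["sqrtPriceX96"],
    }
```
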
diff --git a/indexer/modules/custom/uniswap_v3/agni_abi.py b/hemera_udf/uniswap_v3/abi/agni_abi.py
similarity index 99%
rename from indexer/modules/custom/uniswap_v3/agni_abi.py
rename to hemera_udf/uniswap_v3/abi/agni_abi.py
index 0bb19acc7..8151c4d98 100644
--- a/indexer/modules/custom/uniswap_v3/agni_abi.py
+++ b/hemera_udf/uniswap_v3/abi/agni_abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
POSITIONS_FUNCTION = Function(
{
diff --git a/indexer/modules/custom/uniswap_v3/swapsicle_abi.py b/hemera_udf/uniswap_v3/abi/swapsicle_abi.py
similarity index 98%
rename from indexer/modules/custom/uniswap_v3/swapsicle_abi.py
rename to hemera_udf/uniswap_v3/abi/swapsicle_abi.py
index b521528d2..a937a39ba 100644
--- a/indexer/modules/custom/uniswap_v3/swapsicle_abi.py
+++ b/hemera_udf/uniswap_v3/abi/swapsicle_abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
POOL_CREATED_EVENT = Event(
{
diff --git a/hemera_udf/uniswap_v3/abi/teahouse_abi.py b/hemera_udf/uniswap_v3/abi/teahouse_abi.py
new file mode 100644
index 000000000..84a7151b4
--- /dev/null
+++ b/hemera_udf/uniswap_v3/abi/teahouse_abi.py
@@ -0,0 +1,983 @@
+from hemera.common.utils.abi_code_utils import Event, Function
+
+ADD_LIQUIDITY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "pool", "type": "address"},
+ {"indexed": False, "internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"indexed": False, "internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"indexed": False, "internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "AddLiquidity",
+ "type": "event",
+ }
+)
+ADMIN_CHANGED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": False, "internalType": "address", "name": "previousAdmin", "type": "address"},
+ {"indexed": False, "internalType": "address", "name": "newAdmin", "type": "address"},
+ ],
+ "name": "AdminChanged",
+ "type": "event",
+ }
+)
+APPROVAL_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "owner", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "spender", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "value", "type": "uint256"},
+ ],
+ "name": "Approval",
+ "type": "event",
+ }
+)
+BEACON_UPGRADED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [{"indexed": True, "internalType": "address", "name": "beacon", "type": "address"}],
+ "name": "BeaconUpgraded",
+ "type": "event",
+ }
+)
+COLLECT_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "pool", "type": "address"},
+ {"indexed": False, "internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"indexed": False, "internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "Collect",
+ "type": "event",
+ }
+)
+COLLECT_SWAP_FEES_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "pool", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "feeAmount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "feeAmount1", "type": "uint256"},
+ ],
+ "name": "CollectSwapFees",
+ "type": "event",
+ }
+)
+DEPOSIT_SHARES_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "shareOwner", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "shares", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "feeAmount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "feeAmount1", "type": "uint256"},
+ ],
+ "name": "DepositShares",
+ "type": "event",
+ }
+)
+FEE_CONFIG_CHANGED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "timestamp", "type": "uint256"},
+ {
+ "components": [
+ {"internalType": "address", "name": "vault", "type": "address"},
+ {"internalType": "uint24", "name": "entryFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "exitFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "performanceFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "managementFee", "type": "uint24"},
+ ],
+ "indexed": False,
+ "internalType": "struct ITeaVaultV3Pair.FeeConfig",
+ "name": "feeConfig",
+ "type": "tuple",
+ },
+ ],
+ "name": "FeeConfigChanged",
+ "type": "event",
+ }
+)
+INITIALIZED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [{"indexed": False, "internalType": "uint8", "name": "version", "type": "uint8"}],
+ "name": "Initialized",
+ "type": "event",
+ }
+)
+MANAGEMENT_FEE_COLLECTED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [{"indexed": False, "internalType": "uint256", "name": "shares", "type": "uint256"}],
+ "name": "ManagementFeeCollected",
+ "type": "event",
+ }
+)
+MANAGER_CHANGED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "newManager", "type": "address"},
+ ],
+ "name": "ManagerChanged",
+ "type": "event",
+ }
+)
+OWNERSHIP_TRANSFERRED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "previousOwner", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "newOwner", "type": "address"},
+ ],
+ "name": "OwnershipTransferred",
+ "type": "event",
+ }
+)
+REMOVE_LIQUIDITY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "pool", "type": "address"},
+ {"indexed": False, "internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"indexed": False, "internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"indexed": False, "internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "RemoveLiquidity",
+ "type": "event",
+ }
+)
+SWAP_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "bool", "name": "zeroForOne", "type": "bool"},
+ {"indexed": True, "internalType": "bool", "name": "exactInput", "type": "bool"},
+ {"indexed": False, "internalType": "uint256", "name": "amountIn", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amountOut", "type": "uint256"},
+ ],
+ "name": "Swap",
+ "type": "event",
+ }
+)
+TEA_VAULT_V3_PAIR_CREATED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [{"indexed": True, "internalType": "address", "name": "teaVaultAddress", "type": "address"}],
+ "name": "TeaVaultV3PairCreated",
+ "type": "event",
+ }
+)
+TRANSFER_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "from", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "to", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "value", "type": "uint256"},
+ ],
+ "name": "Transfer",
+ "type": "event",
+ }
+)
+UPGRADED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [{"indexed": True, "internalType": "address", "name": "implementation", "type": "address"}],
+ "name": "Upgraded",
+ "type": "event",
+ }
+)
+WITHDRAW_SHARES_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "shareOwner", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "shares", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "feeShares", "type": "uint256"},
+ ],
+ "name": "WithdrawShares",
+ "type": "event",
+ }
+)
+DECIMALS_MULTIPLIER_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "DECIMALS_MULTIPLIER",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+FEE_CAP_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "FEE_CAP",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+FEE_MULTIPLIER_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "FEE_MULTIPLIER",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+SECONDS_IN_A_YEAR_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "SECONDS_IN_A_YEAR",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+ADD_LIQUIDITY_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "int24", "name": "_tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "_tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "_liquidity", "type": "uint128"},
+ {"internalType": "uint256", "name": "_amount0Min", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amount1Min", "type": "uint256"},
+ {"internalType": "uint64", "name": "_deadline", "type": "uint64"},
+ ],
+ "name": "addLiquidity",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+AGNI_MINT_CALLBACK_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "uint256", "name": "_amount0Owed", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amount1Owed", "type": "uint256"},
+ {"internalType": "bytes", "name": "_data", "type": "bytes"},
+ ],
+ "name": "agniMintCallback",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+AGNI_SWAP_CALLBACK_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "int256", "name": "_amount0Delta", "type": "int256"},
+ {"internalType": "int256", "name": "_amount1Delta", "type": "int256"},
+ {"internalType": "bytes", "name": "_data", "type": "bytes"},
+ ],
+ "name": "agniSwapCallback",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+ALL_POSITION_INFO_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "allPositionInfo",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ {"internalType": "uint256", "name": "fee0", "type": "uint256"},
+ {"internalType": "uint256", "name": "fee1", "type": "uint256"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+ALLOWANCE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "owner", "type": "address"},
+ {"internalType": "address", "name": "spender", "type": "address"},
+ ],
+ "name": "allowance",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+APPROVE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "spender", "type": "address"},
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ ],
+ "name": "approve",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+ASSET_TOKEN0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "assetToken0",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+ASSET_TOKEN1_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "assetToken1",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+ASSIGN_MANAGER_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "_manager", "type": "address"}],
+ "name": "assignManager",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+ASSIGN_ROUTER1_INCH_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "_router1Inch", "type": "address"}],
+ "name": "assignRouter1Inch",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+BALANCE_OF_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "account", "type": "address"}],
+ "name": "balanceOf",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+CLIPPER_SWAP_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "clipperExchange", "type": "address"},
+ {"internalType": "address", "name": "srcToken", "type": "address"},
+ {"internalType": "address", "name": "dstToken", "type": "address"},
+ {"internalType": "uint256", "name": "inputAmount", "type": "uint256"},
+ {"internalType": "uint256", "name": "outputAmount", "type": "uint256"},
+ {"internalType": "uint256", "name": "goodUntil", "type": "uint256"},
+ {"internalType": "bytes32", "name": "r", "type": "bytes32"},
+ {"internalType": "bytes32", "name": "vs", "type": "bytes32"},
+ ],
+ "name": "clipperSwap",
+ "outputs": [{"internalType": "uint256", "name": "returnAmount", "type": "uint256"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+COLLECT_ALL_SWAP_FEE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "collectAllSwapFee",
+ "outputs": [
+ {"internalType": "uint128", "name": "amount0", "type": "uint128"},
+ {"internalType": "uint128", "name": "amount1", "type": "uint128"},
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+COLLECT_MANAGEMENT_FEE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "collectManagementFee",
+ "outputs": [{"internalType": "uint256", "name": "collectedShares", "type": "uint256"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+COLLECT_POSITION_SWAP_FEE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "int24", "name": "_tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "_tickUpper", "type": "int24"},
+ ],
+ "name": "collectPositionSwapFee",
+ "outputs": [
+ {"internalType": "uint128", "name": "amount0", "type": "uint128"},
+ {"internalType": "uint128", "name": "amount1", "type": "uint128"},
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+DECIMALS_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "decimals",
+ "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+DECREASE_ALLOWANCE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "spender", "type": "address"},
+ {"internalType": "uint256", "name": "subtractedValue", "type": "uint256"},
+ ],
+ "name": "decreaseAllowance",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+DEPOSIT_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "uint256", "name": "_shares", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amount0Max", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amount1Max", "type": "uint256"},
+ ],
+ "name": "deposit",
+ "outputs": [
+ {"internalType": "uint256", "name": "depositedAmount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "depositedAmount1", "type": "uint256"},
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+ESTIMATED_VALUE_IN_TOKEN0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "estimatedValueInToken0",
+ "outputs": [{"internalType": "uint256", "name": "value0", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+ESTIMATED_VALUE_IN_TOKEN1_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "estimatedValueInToken1",
+ "outputs": [{"internalType": "uint256", "name": "value1", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+FEE_CONFIG_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "feeConfig",
+ "outputs": [
+ {"internalType": "address", "name": "vault", "type": "address"},
+ {"internalType": "uint24", "name": "entryFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "exitFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "performanceFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "managementFee", "type": "uint24"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_ALL_POSITIONS_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "getAllPositions",
+ "outputs": [
+ {
+ "components": [
+ {"internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ ],
+ "internalType": "struct ITeaVaultV3Pair.Position[]",
+ "name": "results",
+ "type": "tuple[]",
+ }
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_AMOUNTS_FOR_LIQUIDITY_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ ],
+ "name": "getAmountsForLiquidity",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_LIQUIDITY_FOR_AMOUNTS_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "getLiquidityForAmounts",
+ "outputs": [{"internalType": "uint128", "name": "liquidity", "type": "uint128"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_POOL_INFO_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "getPoolInfo",
+ "outputs": [
+ {"internalType": "address", "name": "", "type": "address"},
+ {"internalType": "address", "name": "", "type": "address"},
+ {"internalType": "uint8", "name": "", "type": "uint8"},
+ {"internalType": "uint8", "name": "", "type": "uint8"},
+ {"internalType": "uint24", "name": "", "type": "uint24"},
+ {"internalType": "uint160", "name": "", "type": "uint160"},
+ {"internalType": "int24", "name": "", "type": "int24"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_TOKEN0_BALANCE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "getToken0Balance",
+ "outputs": [{"internalType": "uint256", "name": "amount", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+GET_TOKEN1_BALANCE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "getToken1Balance",
+ "outputs": [{"internalType": "uint256", "name": "amount", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+INCREASE_ALLOWANCE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "spender", "type": "address"},
+ {"internalType": "uint256", "name": "addedValue", "type": "uint256"},
+ ],
+ "name": "increaseAllowance",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+INITIALIZE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "string", "name": "_name", "type": "string"},
+ {"internalType": "string", "name": "_symbol", "type": "string"},
+ {"internalType": "address", "name": "_factory", "type": "address"},
+ {"internalType": "address", "name": "_token0", "type": "address"},
+ {"internalType": "address", "name": "_token1", "type": "address"},
+ {"internalType": "uint24", "name": "_feeTier", "type": "uint24"},
+ {"internalType": "uint8", "name": "_decimalOffset", "type": "uint8"},
+ {"internalType": "uint24", "name": "_feeCap", "type": "uint24"},
+ {
+ "components": [
+ {"internalType": "address", "name": "vault", "type": "address"},
+ {"internalType": "uint24", "name": "entryFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "exitFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "performanceFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "managementFee", "type": "uint24"},
+ ],
+ "internalType": "struct ITeaVaultV3Pair.FeeConfig",
+ "name": "_feeConfig",
+ "type": "tuple",
+ },
+ {"internalType": "address", "name": "_owner", "type": "address"},
+ ],
+ "name": "initialize",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+LAST_COLLECT_MANAGEMENT_FEE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "lastCollectManagementFee",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+MANAGER_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "manager",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+MULTICALL_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "bytes[]", "name": "data", "type": "bytes[]"}],
+ "name": "multicall",
+ "outputs": [{"internalType": "bytes[]", "name": "results", "type": "bytes[]"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+NAME_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "name",
+ "outputs": [{"internalType": "string", "name": "", "type": "string"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+OWNER_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "owner",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+POOL_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "pool",
+ "outputs": [{"internalType": "contract IUniswapV3Pool", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+POSITION_INFO_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "uint256", "name": "_index", "type": "uint256"}],
+ "name": "positionInfo",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ {"internalType": "uint256", "name": "fee0", "type": "uint256"},
+ {"internalType": "uint256", "name": "fee1", "type": "uint256"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+POSITION_INFO_BY_TICKS_FUNCTION = Function(  # overloaded positionInfo on-chain; distinct name avoids clobbering the index-based variant above
+ {
+ "inputs": [
+ {"internalType": "int24", "name": "_tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "_tickUpper", "type": "int24"},
+ ],
+ "name": "positionInfo",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ {"internalType": "uint256", "name": "fee0", "type": "uint256"},
+ {"internalType": "uint256", "name": "fee1", "type": "uint256"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+POSITIONS_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "name": "positions",
+ "outputs": [
+ {"internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+PROXIABLE_UUID_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "proxiableUUID",
+ "outputs": [{"internalType": "bytes32", "name": "", "type": "bytes32"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+REMOVE_LIQUIDITY_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "int24", "name": "_tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "_tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "_liquidity", "type": "uint128"},
+ {"internalType": "uint256", "name": "_amount0Min", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amount1Min", "type": "uint256"},
+ {"internalType": "uint64", "name": "_deadline", "type": "uint64"},
+ ],
+ "name": "removeLiquidity",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+RENOUNCE_OWNERSHIP_FUNCTION = Function(
+ {"inputs": [], "name": "renounceOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function"}
+)
+ROUTER1_INCH_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "router1Inch",
+ "outputs": [{"internalType": "contract IGenericRouter1Inch", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+SET_FEE_CONFIG_FUNCTION = Function(
+ {
+ "inputs": [
+ {
+ "components": [
+ {"internalType": "address", "name": "vault", "type": "address"},
+ {"internalType": "uint24", "name": "entryFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "exitFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "performanceFee", "type": "uint24"},
+ {"internalType": "uint24", "name": "managementFee", "type": "uint24"},
+ ],
+ "internalType": "struct ITeaVaultV3Pair.FeeConfig",
+ "name": "_feeConfig",
+ "type": "tuple",
+ }
+ ],
+ "name": "setFeeConfig",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+SIMULATE_SWAP_INPUT_SINGLE_INTERNAL_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "bool", "name": "_zeroForOne", "type": "bool"},
+ {"internalType": "uint256", "name": "_amountIn", "type": "uint256"},
+ ],
+ "name": "simulateSwapInputSingleInternal",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+SWAP_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "executor", "type": "address"},
+ {
+ "components": [
+ {"internalType": "address", "name": "srcToken", "type": "address"},
+ {"internalType": "address", "name": "dstToken", "type": "address"},
+ {"internalType": "address payable", "name": "srcReceiver", "type": "address"},
+ {"internalType": "address payable", "name": "dstReceiver", "type": "address"},
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"internalType": "uint256", "name": "minReturnAmount", "type": "uint256"},
+ {"internalType": "uint256", "name": "flags", "type": "uint256"},
+ ],
+ "internalType": "struct IGenericRouter1Inch.SwapDescription",
+ "name": "desc",
+ "type": "tuple",
+ },
+ {"internalType": "bytes", "name": "permit", "type": "bytes"},
+ {"internalType": "bytes", "name": "data", "type": "bytes"},
+ ],
+ "name": "swap",
+ "outputs": [
+ {"internalType": "uint256", "name": "returnAmount", "type": "uint256"},
+ {"internalType": "uint256", "name": "spentAmount", "type": "uint256"},
+ ],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+SWAP_INPUT_SINGLE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "bool", "name": "_zeroForOne", "type": "bool"},
+ {"internalType": "uint256", "name": "_amountIn", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amountOutMin", "type": "uint256"},
+ {"internalType": "uint160", "name": "_minPriceInSqrtPriceX96", "type": "uint160"},
+ {"internalType": "uint64", "name": "_deadline", "type": "uint64"},
+ ],
+ "name": "swapInputSingle",
+ "outputs": [{"internalType": "uint256", "name": "amountOut", "type": "uint256"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+SWAP_OUTPUT_SINGLE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "bool", "name": "_zeroForOne", "type": "bool"},
+ {"internalType": "uint256", "name": "_amountOut", "type": "uint256"},
+ {"internalType": "uint256", "name": "_amountInMax", "type": "uint256"},
+ {"internalType": "uint160", "name": "_maxPriceInSqrtPriceX96", "type": "uint160"},
+ {"internalType": "uint64", "name": "_deadline", "type": "uint64"},
+ ],
+ "name": "swapOutputSingle",
+ "outputs": [{"internalType": "uint256", "name": "amountIn", "type": "uint256"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+SYMBOL_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "symbol",
+ "outputs": [{"internalType": "string", "name": "", "type": "string"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TOTAL_SUPPLY_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "totalSupply",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+TRANSFER_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "to", "type": "address"},
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ ],
+ "name": "transfer",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+TRANSFER_FROM_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "from", "type": "address"},
+ {"internalType": "address", "name": "to", "type": "address"},
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ ],
+ "name": "transferFrom",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+TRANSFER_OWNERSHIP_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "newOwner", "type": "address"}],
+ "name": "transferOwnership",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+UNISWAP_V3_SWAP_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"internalType": "uint256", "name": "minReturn", "type": "uint256"},
+ {"internalType": "uint256[]", "name": "pools", "type": "uint256[]"},
+ ],
+ "name": "uniswapV3Swap",
+ "outputs": [{"internalType": "uint256", "name": "returnAmount", "type": "uint256"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+UNOSWAP_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "srcToken", "type": "address"},
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ {"internalType": "uint256", "name": "minReturn", "type": "uint256"},
+ {"internalType": "uint256[]", "name": "pools", "type": "uint256[]"},
+ ],
+ "name": "unoswap",
+ "outputs": [{"internalType": "uint256", "name": "returnAmount", "type": "uint256"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+UPGRADE_TO_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "newImplementation", "type": "address"}],
+ "name": "upgradeTo",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+UPGRADE_TO_AND_CALL_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "newImplementation", "type": "address"},
+ {"internalType": "bytes", "name": "data", "type": "bytes"},
+ ],
+ "name": "upgradeToAndCall",
+ "outputs": [],
+ "stateMutability": "payable",
+ "type": "function",
+ }
+)
+VAULT_ALL_UNDERLYING_ASSETS_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "vaultAllUnderlyingAssets",
+ "outputs": [
+ {"internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
diff --git a/indexer/modules/custom/uniswap_v3/uniswapv3_abi.py b/hemera_udf/uniswap_v3/abi/uniswapv3_abi.py
similarity index 99%
rename from indexer/modules/custom/uniswap_v3/uniswapv3_abi.py
rename to hemera_udf/uniswap_v3/abi/uniswapv3_abi.py
index c72e50aac..dddc96dfc 100644
--- a/indexer/modules/custom/uniswap_v3/uniswapv3_abi.py
+++ b/hemera_udf/uniswap_v3/abi/uniswapv3_abi.py
@@ -1,4 +1,4 @@
-from common.utils.abi_code_utils import Event, Function
+from hemera.common.utils.abi_code_utils import Event, Function
POSITIONS_FUNCTION = Function(
{
diff --git a/indexer/modules/user_ops/__init__.py b/hemera_udf/uniswap_v3/domains/__init__.py
similarity index 100%
rename from indexer/modules/user_ops/__init__.py
rename to hemera_udf/uniswap_v3/domains/__init__.py
diff --git a/indexer/modules/custom/uniswap_v3/domains/feature_uniswap_v3.py b/hemera_udf/uniswap_v3/domains/feature_uniswap_v3.py
similarity index 75%
rename from indexer/modules/custom/uniswap_v3/domains/feature_uniswap_v3.py
rename to hemera_udf/uniswap_v3/domains/feature_uniswap_v3.py
index 30181f1ee..92ebbd735 100644
--- a/indexer/modules/custom/uniswap_v3/domains/feature_uniswap_v3.py
+++ b/hemera_udf/uniswap_v3/domains/feature_uniswap_v3.py
@@ -1,10 +1,10 @@
from dataclasses import dataclass
-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain
@dataclass
-class UniswapV3Pool(FilterData):
+class UniswapV3Pool(Domain):
position_token_address: str
factory_address: str
pool_address: str
@@ -17,7 +17,7 @@ class UniswapV3Pool(FilterData):
@dataclass
-class UniswapV3Token(FilterData):
+class UniswapV3Token(Domain):
position_token_address: str
token_id: int
pool_address: str
@@ -29,17 +29,19 @@ class UniswapV3Token(FilterData):
@dataclass
-class UniswapV3PoolPrice(FilterData):
+class UniswapV3PoolPrice(Domain):
factory_address: str
pool_address: str
sqrt_price_x96: int
tick: int
+ token0_price: float
+ token1_price: float
block_number: int
block_timestamp: int
@dataclass
-class UniswapV3TokenDetail(FilterData):
+class UniswapV3TokenDetail(Domain):
position_token_address: str
token_id: int
pool_address: str
@@ -50,17 +52,19 @@ class UniswapV3TokenDetail(FilterData):
@dataclass
-class UniswapV3PoolCurrentPrice(FilterData):
+class UniswapV3PoolCurrentPrice(Domain):
factory_address: str
pool_address: str
sqrt_price_x96: int
tick: int
+ token0_price: float
+ token1_price: float
block_number: int
block_timestamp: int
@dataclass
-class UniswapV3SwapEvent(FilterData):
+class UniswapV3SwapEvent(Domain):
pool_address: str
position_token_address: str
transaction_from_address: str
@@ -68,6 +72,9 @@ class UniswapV3SwapEvent(FilterData):
recipient: str
amount0: int
amount1: int
+ token0_price: float
+ token1_price: float
+ amount_usd: float
liquidity: int
tick: int
sqrt_price_x96: int
@@ -80,7 +87,7 @@ class UniswapV3SwapEvent(FilterData):
@dataclass
-class UniswapV3TokenCurrentStatus(FilterData):
+class UniswapV3TokenCurrentStatus(Domain):
position_token_address: str
token_id: int
pool_address: str
@@ -91,7 +98,7 @@ class UniswapV3TokenCurrentStatus(FilterData):
@dataclass
-class UniswapV3TokenUpdateLiquidity(FilterData):
+class UniswapV3TokenUpdateLiquidity(Domain):
position_token_address: str
token_id: int
owner: str
@@ -109,7 +116,7 @@ class UniswapV3TokenUpdateLiquidity(FilterData):
@dataclass
-class UniswapV3TokenCollectFee(FilterData):
+class UniswapV3TokenCollectFee(Domain):
position_token_address: str
recipient: str
owner: str
@@ -183,3 +190,25 @@ class UniswapV3PoolFromSwapEvent(UniswapV3Pool):
@dataclass
class UniswapV3PoolFromToken(UniswapV3Pool):
pass
+
+
+@dataclass
+class TeahouseLiquidityHist(Domain):
+ position_token_address: str
+ pool_address: str
+ liquidity: int
+ tick_lower: int
+ tick_upper: int
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class TeahouseLiquidityCurrent(Domain):
+ position_token_address: str
+ pool_address: str
+ liquidity: int
+ tick_lower: int
+ tick_upper: int
+ block_number: int
+ block_timestamp: int
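
The new `token0_price` / `token1_price` fields on the price and swap domains derive from the pool's `sqrt_price_x96`. A hedged sketch of the standard V3 conversion (whether Hemera stores raw or decimal-adjusted prices is not stated in this diff):

```python
# Standard Uniswap-V3-style price decoding: sqrtPriceX96 encodes
# sqrt(token1/token0) scaled by 2**96. The decimal adjustment shown
# here is the conventional form, assumed rather than taken from
# this changeset.
Q96 = 2**96


def pool_prices(sqrt_price_x96: int, token0_decimals: int, token1_decimals: int):
    raw = (sqrt_price_x96 / Q96) ** 2  # token1 per token0, ignoring decimals
    token0_price = raw * 10 ** (token0_decimals - token1_decimals)
    token1_price = 1 / token0_price if token0_price else 0.0
    return token0_price, token1_price
```
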
diff --git a/indexer/modules/custom/uniswap_v3/endpoints/__init__.py b/hemera_udf/uniswap_v3/endpoints/__init__.py
similarity index 100%
rename from indexer/modules/custom/uniswap_v3/endpoints/__init__.py
rename to hemera_udf/uniswap_v3/endpoints/__init__.py
diff --git a/indexer/modules/custom/uniswap_v3/endpoints/routes.py b/hemera_udf/uniswap_v3/endpoints/routes.py
similarity index 92%
rename from indexer/modules/custom/uniswap_v3/endpoints/routes.py
rename to hemera_udf/uniswap_v3/endpoints/routes.py
index 3360102f1..722620e55 100644
--- a/indexer/modules/custom/uniswap_v3/endpoints/routes.py
+++ b/hemera_udf/uniswap_v3/endpoints/routes.py
@@ -7,21 +7,21 @@
from sqlalchemy import desc
from sqlalchemy.sql import select, tuple_
-from api.app.address.features import register_feature
-from api.app.cache import cache
-from api.app.db_service.tokens import get_token_price_map_by_symbol_list
-from common.models import db
-from common.models.token_hourly_price import TokenHourlyPrices
-from common.models.tokens import Tokens
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.modules.custom.opensea.endpoint.routes import get_token_daily_price
-from indexer.modules.custom.uniswap_v3.endpoints import uniswap_v3_namespace
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_liquidity_records import UniswapV3TokenLiquidityRecords
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_pool_current_prices import UniswapV3PoolCurrentPrices
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_swap_records import UniswapV3PoolSwapRecords
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_token_current_status import UniswapV3TokenCurrentStatus
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_tokens import UniswapV3Tokens
+from hemera.api.app.address.features import register_feature
+from hemera.api.app.cache import cache
+from hemera.api.app.db_service.tokens import get_token_price_map_by_symbol_list
+from hemera.common.models import db
+from hemera.common.models.token_hourly_price import TokenHourlyPrices
+from hemera.common.models.tokens import Tokens
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera_udf.opensea.endpoint.routes import get_token_daily_price
+from hemera_udf.uniswap_v3.endpoints import uniswap_v3_namespace
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_liquidity_records import UniswapV3TokenLiquidityRecords
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_pool_current_prices import UniswapV3PoolCurrentPrices
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_swap_records import UniswapV3PoolSwapRecords
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_token_current_status import UniswapV3TokenCurrentStatus
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_tokens import UniswapV3Tokens
Q96 = 2**96
PAGE_SIZE = 10
@@ -97,16 +97,16 @@ def get_uniswap_v3_trading_value(address) -> Optional[Dict[str, Any]]:
if token_map.get(swaps.token0_address):
token = token_map[swaps.token0_address]
total_volume_usd += abs(
- get_token_daily_price(token.symbol, datetime.fromtimestamp(swaps.block_timestamp))
+ get_token_daily_price(token.symbol, swaps.block_timestamp)
* float(swaps.amount0)
/ float(10**token.decimals)
)
elif token_map.get(swaps.token1_address):
token = token_map[swaps.token1_address]
total_volume_usd += abs(
- get_token_daily_price(token.symbol, datetime.fromtimestamp(swaps.block_timestamp))
+ get_token_daily_price(token.symbol, swaps.block_timestamp)
* float(swaps.amount1)
- / float(0**token.decimals)
+ / float(10**token.decimals)
)
else:
continue
@@ -156,7 +156,7 @@ def get_uniswap_v3_trading_events(address, limit=5, offset=0) -> Optional[Dict[s
swap_records.append(
{
"block_number": swap.block_number,
- "block_timestamp": datetime.fromtimestamp(swap.block_timestamp).isoformat("T", "seconds"),
+ "block_timestamp": swap.block_timestamp.isoformat("T", "seconds"),
"transaction_hash": bytes_to_hex_str(swap.transaction_hash),
"pool_address": bytes_to_hex_str(swap.pool_address),
"amount0": "{0:.18f}".format(abs(swap.amount0) / 10**token0.decimals).rstrip("0").rstrip("."),
@@ -301,7 +301,7 @@ def get_uniswap_v3_liquidity_value(address) -> Optional[Dict[str, Any]]:
"pool_address": pool_address,
"position_token_address": position_token_address,
"token_id": str(token_id),
-                "block_timestamp": datetime.fromtimestamp(holding.block_timestamp).isoformat("T", "seconds"),
+                "block_timestamp": holding.block_timestamp.isoformat("T", "seconds"),
"token0": {
"token0_symbol": token0_info.symbol,
"token0_icon_url": token0_info.icon_url,
@@ -420,7 +420,7 @@ def get_uniswap_v3_liquidity_events(address) -> Optional[Dict[str, Any]]:
"pool_address": pool_address,
"position_token_address": position_token_address,
"token_id": str(token_id),
-                "block_timestamp": datetime.fromtimestamp(holding.block_timestamp).isoformat("T", "seconds"),
+                "block_timestamp": holding.block_timestamp.isoformat("T", "seconds"),
"token0": {
"token0_symbol": token0_info.symbol,
"token0_icon_url": token0_info.icon_url,
@@ -540,7 +540,7 @@ def get(self, address):
"pool_address": pool_address,
"position_token_address": position_token_address,
"token_id": str(token_id),
-                "block_timestamp": datetime.fromtimestamp(holding.block_timestamp).isoformat("T", "seconds"),
+                "block_timestamp": holding.block_timestamp.isoformat("T", "seconds"),
"token0": {
"token0_symbol": token0_info.symbol,
"token0_icon_url": token0_info.icon_url,
diff --git a/hemera_udf/uniswap_v3/jobs/__init__.py b/hemera_udf/uniswap_v3/jobs/__init__.py
new file mode 100644
index 000000000..7cff88b52
--- /dev/null
+++ b/hemera_udf/uniswap_v3/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 17:11
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/hemera_udf/uniswap_v3/jobs/teahouse_liquidity_job.py b/hemera_udf/uniswap_v3/jobs/teahouse_liquidity_job.py
new file mode 100644
index 000000000..7b3d7f4dd
--- /dev/null
+++ b/hemera_udf/uniswap_v3/jobs/teahouse_liquidity_job.py
@@ -0,0 +1,92 @@
+import logging
+
+from hemera.indexer.domains.log import Log
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.collection_utils import distinct_collections_by_group
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.uniswap_v3.abi.teahouse_abi import GET_ALL_POSITIONS_FUNCTION
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import TeahouseLiquidityCurrent, TeahouseLiquidityHist
+
+logger = logging.getLogger(__name__)
+
+
+class TeahouseLiquidityJob(FilterTransactionDataJob):
+ dependency_types = [Log]
+ output_types = [TeahouseLiquidityHist, TeahouseLiquidityCurrent]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ config = kwargs["config"]
+ self._position_pool_dict = config.get("teahouse_job")
+ self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
+
+ def get_filter(self):
+ filter_address_list = list(self._position_pool_dict.keys())
+
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(addresses=filter_address_list),
+ ]
+ )
+
+ @staticmethod
+ def extract_current_status(records, current_status_domain, keys):
+ results = []
+ last_records = distinct_collections_by_group(collections=records, group_by=keys, max_key="block_number")
+ for last_record in last_records:
+ record = current_status_domain(**vars(last_record))
+ results.append(record)
+ return results
+
+ def _process(self, **kwargs):
+ logs = self._data_buff["log"]
+ call_dict = {}
+
+ for log in logs:
+ if log.address in self._position_pool_dict:
+ position_token_address = log.address
+ block_number = log.block_number
+ call_dict[position_token_address, block_number] = Call(
+ target=position_token_address,
+ function_abi=GET_ALL_POSITIONS_FUNCTION,
+ block_number=block_number,
+ user_defined_k=log.block_timestamp,
+ )
+
+ call_list = list(call_dict.values())
+ self.multi_call_helper.execute_calls(call_list)
+
+ records = []
+
+ for call in call_list:
+ position_token_address = call.target.lower()
+ pool_address = self._position_pool_dict[position_token_address]
+ block_number = call.block_number
+ block_timestamp = call.user_defined_k
+ results = call.returns.get("results")
+ if results:
+ result = results[0]
+ liquidity = result.get("liquidity")
+ tick_lower = result.get("tickLower")
+ tick_upper = result.get("tickUpper")
+
+ record = TeahouseLiquidityHist(
+ position_token_address=position_token_address,
+ pool_address=pool_address,
+ liquidity=liquidity,
+ tick_lower=tick_lower,
+ tick_upper=tick_upper,
+ block_number=block_number,
+ block_timestamp=block_timestamp,
+ )
+ records.append(record)
+
+ current_list = self.extract_current_status(
+ records, TeahouseLiquidityCurrent, ["position_token_address", "pool_address"]
+ )
+ self._collect_domains(records)
+ self._collect_domains(current_list)
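
`extract_current_status` relies on `distinct_collections_by_group` to keep, per `(position_token_address, pool_address)` group, only the record with the highest `block_number`, then re-wraps it as the "current" domain. A plain-Python restatement of that reduction (a stand-in for the helper, not its actual implementation):

```python
# Equivalent of extract_current_status for this job, spelled out.
from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import (
    TeahouseLiquidityCurrent,
    TeahouseLiquidityHist,
)


def latest_per_pool(records):
    latest = {}
    for r in records:
        key = (r.position_token_address, r.pool_address)
        if key not in latest or r.block_number > latest[key].block_number:
            latest[key] = r
    return [TeahouseLiquidityCurrent(**vars(r)) for r in latest.values()]
```
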
diff --git a/hemera_udf/uniswap_v3/jobs/uniswap_v3_pool_job.py b/hemera_udf/uniswap_v3/jobs/uniswap_v3_pool_job.py
new file mode 100644
index 000000000..fcb86ed83
--- /dev/null
+++ b/hemera_udf/uniswap_v3/jobs/uniswap_v3_pool_job.py
@@ -0,0 +1,125 @@
+import logging
+
+import hemera_udf.uniswap_v3.abi.aerodrome_abi as aerodrome_abi
+import hemera_udf.uniswap_v3.abi.swapsicle_abi as swapsicle_abi
+import hemera_udf.uniswap_v3.abi.uniswapv3_abi as uniswapv3_abi
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.log import Log
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import UniswapV3Pool
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
+from hemera_udf.uniswap_v3.util import AddressManager
+
+logger = logging.getLogger(__name__)
+
+
+class ExportUniSwapV3PoolJob(FilterTransactionDataJob):
+ dependency_types = [Log]
+ output_types = [UniswapV3Pool]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ config = kwargs["config"]["uniswap_v3_job"]
+ jobs = config.get("jobs", [])
+ self._address_manager = AddressManager(jobs)
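+ # Each entry in jobs is expected to carry type, factory_address and position_token_address (see AddressManager)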
+ # self._existing_pools = self.get_existing_pools()
+
+ def get_filter(self):
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ topics=[
+ abi_module.POOL_CREATED_EVENT.get_signature()
+ for abi_module in self._address_manager.abi_modules_list
+ ],
+ addresses=self._address_manager.factory_address_list,
+ ),
+ ]
+ )
+
+ def _process(self, **kwargs):
+ self.get_pools()
+
+ def get_pools(self):
+ logs = self._data_buff[Log.type()]
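+ # Each supported DEX flavor (swapsicle, uniswapv3, aerodrome) has its own PoolCreated layout, so decode per factory ABI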
+ for log in logs:
+ pool_dict = {}
+ pool_address = None
+
+ position_token_address = self._address_manager.get_position_by_factory(log.address)
+ if log.topic0 == swapsicle_abi.POOL_CREATED_EVENT.get_signature():
+ decoded_data = swapsicle_abi.POOL_CREATED_EVENT.decode_log(log)
+ pool_address = decoded_data["pool"]
+ # tick_spacing and fee are emitted in separate logs, so default both to 0 here
+ pool_dict.update(
+ {
+ "factory_address": log.address,
+ "position_token_address": position_token_address,
+ "token0_address": decoded_data["token0"],
+ "token1_address": decoded_data["token1"],
+ "pool_address": pool_address,
+ "block_number": log.block_number,
+ "block_timestamp": log.block_timestamp,
+ "fee": 0,
+ "tick_spacing": 0,
+ }
+ )
+
+ elif log.topic0 == uniswapv3_abi.POOL_CREATED_EVENT.get_signature():
+ decoded_data = uniswapv3_abi.POOL_CREATED_EVENT.decode_log(log)
+ pool_address = decoded_data["pool"]
+ pool_dict.update(
+ {
+ "factory_address": log.address,
+ "position_token_address": position_token_address,
+ "token0_address": decoded_data["token0"],
+ "token1_address": decoded_data["token1"],
+ "fee": decoded_data["fee"],
+ "tick_spacing": decoded_data["tickSpacing"],
+ "pool_address": pool_address,
+ "block_number": log.block_number,
+ "block_timestamp": log.block_timestamp,
+ }
+ )
+
+ elif log.topic0 == aerodrome_abi.POOL_CREATED_EVENT.get_signature():
+ decoded_data = aerodrome_abi.POOL_CREATED_EVENT.decode_log(log)
+ pool_address = decoded_data["pool"]
+ pool_dict.update(
+ {
+ "factory_address": log.address,
+ "position_token_address": position_token_address,
+ "token0_address": decoded_data["token0"],
+ "token1_address": decoded_data["token1"],
+ "fee": 0,
+ "tick_spacing": decoded_data["tickSpacing"],
+ "pool_address": pool_address,
+ "block_number": log.block_number,
+ "block_timestamp": log.block_timestamp,
+ }
+ )
+
+ if pool_address and position_token_address:
+ # self._existing_pools.add(pool_address)
+ uniswap_v3_pool = UniswapV3Pool(**pool_dict)
+ self._collect_domain(uniswap_v3_pool)
+
+ def get_existing_pools(self):
+ session = self._service.Session()
+ try:
+ existing_pools = set()
+ pools_orm = session.query(UniswapV3Pools).all()
+ for pool in pools_orm:
+ existing_pools.add(bytes_to_hex_str(pool.pool_address))
+
+ except Exception as e:
+ logger.exception(f"Failed to load existing pools: {e}")
+ raise
+ finally:
+ session.close()
+
+ return existing_pools
diff --git a/indexer/modules/custom/uniswap_v3/uniswap_v3_pool_price_job.py b/hemera_udf/uniswap_v3/jobs/uniswap_v3_pool_price_job.py
similarity index 65%
rename from indexer/modules/custom/uniswap_v3/uniswap_v3_pool_price_job.py
rename to hemera_udf/uniswap_v3/jobs/uniswap_v3_pool_price_job.py
index 6efdf67d4..261f5a3d0 100644
--- a/indexer/modules/custom/uniswap_v3/uniswap_v3_pool_price_job.py
+++ b/hemera_udf/uniswap_v3/jobs/uniswap_v3_pool_price_job.py
@@ -1,28 +1,29 @@
import logging
-import indexer.modules.custom.uniswap_v3.agni_abi as agni_abi
-import indexer.modules.custom.uniswap_v3.swapsicle_abi as swapsicle_abi
-import indexer.modules.custom.uniswap_v3.uniswapv3_abi as uniswapv3_abi
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
+import hemera_udf.uniswap_v3.abi.agni_abi as agni_abi
+import hemera_udf.uniswap_v3.abi.swapsicle_abi as swapsicle_abi
+import hemera_udf.uniswap_v3.abi.uniswapv3_abi as uniswapv3_abi
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.token_price.domains import BlockTokenPrice
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import (
UniswapV3PoolCurrentPrice,
UniswapV3PoolFromSwapEvent,
UniswapV3PoolPrice,
UniswapV3SwapEvent,
)
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
-from indexer.modules.custom.uniswap_v3.util import AddressManager
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
+from hemera_udf.uniswap_v3.util import AddressManager
logger = logging.getLogger(__name__)
-class ExportUniSwapV3PoolJob(FilterTransactionDataJob):
- dependency_types = [Transaction]
+class ExportUniSwapV3PoolPriceJob(FilterTransactionDataJob):
+ dependency_types = [Transaction, BlockTokenPrice]
output_types = [UniswapV3PoolPrice, UniswapV3PoolCurrentPrice, UniswapV3SwapEvent, UniswapV3PoolFromSwapEvent]
able_to_reorg = True
@@ -34,7 +35,13 @@ def __init__(self, **kwargs):
self._address_manager = AddressManager(jobs)
self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
- self.pools_requested_by_rpc = []
+ self.pools_requested_by_rpc = set()
+ # self.token_decimals_map = {}
+
+ self.stable_tokens = kwargs["config"].get("export_block_token_price_job", {})
+ self._exist_pools = self.get_existing_pools()
def get_filter(self):
address_list = self._pool_address if self._pool_address else []
@@ -50,6 +57,20 @@ def get_filter(self):
]
)
+ def change_block_token_prices_to_dict(self):
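+ # stable_tokens maps token address -> symbol; invert it, then key prices by (address, block_number) for O(1) lookups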
+ symbol_address_dict = {symbol: address for address, symbol in self.stable_tokens.items()}
+
+ token_prices_dict = {}
+
+ block_token_prices = self._data_buff[BlockTokenPrice.type()]
+ for token_price in block_token_prices:
+ address = symbol_address_dict.get(token_price.token_symbol)
+ if address:
+ block_number = token_price.block_number
+ token_prices_dict[address, block_number] = token_price.token_price
+
+ return token_prices_dict
+
def get_missing_pools_by_rpc(self):
# pool_logs
missing_pool_address_dict = {}
@@ -63,15 +84,15 @@ def get_missing_pools_by_rpc(self):
if log.topic0 == uniswapv3_abi.SWAP_EVENT.get_signature() and log.address not in self._exist_pools:
if log.address not in self.pools_requested_by_rpc:
abi_module = uniswapv3_abi
- self.pools_requested_by_rpc.append(log.address)
+ self.pools_requested_by_rpc.add(log.address)
elif log.topic0 == swapsicle_abi.SWAP_EVENT.get_signature() and log.address not in self._exist_pools:
if log.address not in self.pools_requested_by_rpc:
abi_module = swapsicle_abi
- self.pools_requested_by_rpc.append(log.address)
+ self.pools_requested_by_rpc.add(log.address)
elif log.topic0 == agni_abi.SWAP_EVENT.get_signature() and log.address not in self._exist_pools:
if log.address not in self.pools_requested_by_rpc:
abi_module = agni_abi
- self.pools_requested_by_rpc.append(log.address)
+ self.pools_requested_by_rpc.add(log.address)
if abi_module:
call_dict = {
@@ -105,7 +126,7 @@ def get_missing_pools_by_rpc(self):
for factory_call, fee_call, token0_call, token1_call, tick_spacing_call in zip(
factory_list, fee_list, token0_list, token1_list, tick_spacing_list
):
- factory_address = factory_call.returns.get("")
+ factory_address = factory_call.returns.get("") if factory_call.returns else None
if factory_address:
position_token_address = self._address_manager.get_position_by_factory(factory_address)
if position_token_address:
@@ -139,7 +160,7 @@ def get_missing_pools_by_rpc(self):
self._collect_domain(uniswap_v_pool_from_swap_event)
def _process(self, **kwargs):
- self._exist_pools = self.get_existing_pools()
+ token_prices_dict = self.change_block_token_prices_to_dict()
if not self._pool_address:
self.get_missing_pools_by_rpc()
@@ -157,12 +178,13 @@ def _process(self, **kwargs):
factory_address = pool_data.pop("factory_address")
key_data_dict = {}
decoded_data = {}
+ block_number = log.block_number
if log.topic0 == uniswapv3_abi.SWAP_EVENT.get_signature():
decoded_data = uniswapv3_abi.SWAP_EVENT.decode_log(log)
key_data_dict = {
"tick": decoded_data["tick"],
"sqrt_price_x96": decoded_data["sqrtPriceX96"],
- "block_number": log.block_number,
+ "block_number": block_number,
"block_timestamp": log.block_timestamp,
"pool_address": pool_address,
}
@@ -172,7 +194,7 @@ def _process(self, **kwargs):
key_data_dict = {
"tick": decoded_data["tick"],
"sqrt_price_x96": decoded_data["sqrtPriceX96"],
- "block_number": log.block_number,
+ "block_number": block_number,
"block_timestamp": log.block_timestamp,
"pool_address": pool_address,
}
@@ -183,15 +205,51 @@ def _process(self, **kwargs):
key_data_dict = {
"tick": decoded_data["tick"],
"sqrt_price_x96": decoded_data["price"],
- "block_number": log.block_number,
+ "block_number": block_number,
"block_timestamp": log.block_timestamp,
"pool_address": pool_address,
}
if decoded_data:
- price = UniswapV3PoolPrice(**key_data_dict, factory_address=factory_address)
- price_dict[pool_address, log.block_number] = price
- current_price_dict[pool_address] = UniswapV3PoolCurrentPrice(**vars(price))
+ token0_address = pool_data.get("token0_address")
+ token1_address = pool_data.get("token1_address")
+
+ tokens0 = self.tokens.get(token0_address)
+ tokens1 = self.tokens.get(token1_address)
+
+ decimals0 = tokens0.get("decimals") if tokens0 else None
+ decimals1 = tokens1.get("decimals") if tokens1 else None
+
+ amount0 = decoded_data["amount0"]
+ amount1 = decoded_data["amount1"]
+
+ amount0_abs = abs(amount0)
+ amount1_abs = abs(amount1)
+
+ decimals_conditions = decimals0 is not None and decimals1 is not None
+
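+ # Price the swap in USD via whichever leg is a configured stable token; the
+ # per-block stable price comes from token_prices_dict built in _process above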
+ if token0_address in self.stable_tokens and decimals_conditions:
+ token0_price = token_prices_dict.get((token0_address, block_number))
+ # guard against a missing per-block price to avoid multiplying by None
+ amount_usd = amount0_abs / 10**decimals0 * token0_price if token0_price is not None else None
+ token1_price = amount_usd / (amount1_abs / 10**decimals1) if amount_usd is not None and amount1_abs > 0 else None
+
+ elif token1_address in self.stable_tokens and decimals_conditions:
+ token1_price = token_prices_dict.get((token1_address, block_number))
+ amount_usd = amount1_abs / 10**decimals1 * token1_price if token1_price is not None else None
+ token0_price = amount_usd / (amount0_abs / 10**decimals0) if amount_usd is not None and amount0_abs > 0 else None
+ else:
+ token0_price = None
+ token1_price = None
+ amount_usd = None
+
+ pool_price_item = UniswapV3PoolPrice(
+ **key_data_dict,
+ factory_address=factory_address,
+ token0_price=token0_price,
+ token1_price=token1_price,
+ )
+ price_dict[pool_address, block_number] = pool_price_item
+ current_price_dict[pool_address] = UniswapV3PoolCurrentPrice(**vars(pool_price_item))
self._collect_domain(
UniswapV3SwapEvent(
@@ -200,11 +258,14 @@ def _process(self, **kwargs):
log_index=log.log_index,
sender=decoded_data["sender"],
recipient=decoded_data["recipient"],
- amount0=decoded_data["amount0"],
- amount1=decoded_data["amount1"],
+ amount0=amount0,
+ amount1=amount1,
liquidity=decoded_data["liquidity"],
**key_data_dict,
**pool_data,
+ token0_price=token0_price,
+ token1_price=token1_price,
+ amount_usd=amount_usd,
),
)
diff --git a/indexer/modules/custom/uniswap_v3/uniswap_v3_token_job.py b/hemera_udf/uniswap_v3/jobs/uniswap_v3_token_job.py
similarity index 88%
rename from indexer/modules/custom/uniswap_v3/uniswap_v3_token_job.py
rename to hemera_udf/uniswap_v3/jobs/uniswap_v3_token_job.py
index c77584598..4d08b0c55 100644
--- a/indexer/modules/custom/uniswap_v3/uniswap_v3_token_job.py
+++ b/hemera_udf/uniswap_v3/jobs/uniswap_v3_token_job.py
@@ -1,23 +1,23 @@
import logging
from collections import defaultdict
-from common.utils.format_utils import bytes_to_hex_str, to_int_or_none
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.domain.log import Log
-from indexer.domain.token_transfer import ERC721TokenTransfer
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
+from hemera.common.utils.format_utils import bytes_to_hex_str, to_int_or_none
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token_transfer import ERC721TokenTransfer
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import (
UniswapV3PoolFromToken,
UniswapV3Token,
UniswapV3TokenCurrentStatus,
UniswapV3TokenDetail,
)
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_tokens import UniswapV3Tokens
-from indexer.modules.custom.uniswap_v3.util import AddressManager
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
+from hemera_udf.uniswap_v3.models.feature_uniswap_v3_tokens import UniswapV3Tokens
+from hemera_udf.uniswap_v3.util import AddressManager
logger = logging.getLogger(__name__)
@@ -157,7 +157,11 @@ def _process(self, **kwargs):
block_timestamp = owner_call.user_defined_k
positions = positions_call.returns
- token0, token1, tick_lower, tick_upper, liquidity, fee = self.decode_positions_data(
+
+ if not owner_call.returns or not positions_call.returns:
+ continue
+
+ token0, token1, tick_lower, tick_upper, liquidity, fee, tick_spacing = self.decode_positions_data(
position_token_address, positions
)
data_dict = {
@@ -172,6 +176,7 @@ def _process(self, **kwargs):
"tick_upper": tick_upper,
"liquidity": liquidity,
"fee": fee,
+ "tick_spacing": tick_spacing,
}
positions_data_list.append(data_dict)
self.get_pool_address_by_rpc(positions_data_list)
@@ -199,7 +204,7 @@ def _process(self, **kwargs):
continue
if (position_token_address, token_id) not in self._existing_tokens:
- self._existing_tokens.append((position_token_address, token_id))
+ self._existing_tokens.add((position_token_address, token_id))
token = UniswapV3Token(
position_token_address=position_token_address,
token_id=token_id,
@@ -241,6 +246,7 @@ def get_pool_address_by_rpc(self, positions_data_list):
positions_data.get("position_token_address"),
positions_data.get("token0"),
positions_data.get("token1"),
+ # todo: some pools use tick_spacing
positions_data.get("fee"),
)
pool_address = self.existing_pools.get(key)
@@ -259,6 +265,9 @@ def get_pool_address_by_rpc(self, positions_data_list):
parameters = [positions_data.get("token0"), positions_data.get("token1")]
if uniswapv3_type_str in ("uniswapv3", "agni"):
parameters.append(positions_data.get("fee"))
+ elif uniswapv3_type_str == "aerodrome":
+ parameters.append(positions_data.get("tick_spacing"))
+
call = Call(
target=factory_address,
parameters=parameters,
@@ -276,13 +285,18 @@ def get_pool_address_by_rpc(self, positions_data_list):
factory_address = call.target.lower()
position_token_address = self._address_manager.get_position_by_factory(factory_address)
+ type_str = self._address_manager.get_type_str_by_position(position_token_address)
+
parameters = call.parameters
token0 = parameters[0]
token1 = parameters[1]
- if parameters.__len__() == 3:
+
+ tick_spacing = 0
+ fee = 0
+ if type_str in ("uniswapv3", "agni"):
fee = parameters[2]
- else:
- fee = 0
+ elif type_str == "aerodrome":
+ # aerodrome calls are built as [token0, token1, tick_spacing], so the third element is index 2
+ tick_spacing = parameters[2]
uniswap_v_pool_from_token_positions = UniswapV3PoolFromToken(
position_token_address=position_token_address,
@@ -293,7 +307,7 @@ def get_pool_address_by_rpc(self, positions_data_list):
token1_address=token1,
block_number=call.block_number,
block_timestamp=call.user_defined_k,
- tick_spacing=0,
+ tick_spacing=tick_spacing,
)
self._collect_domain(uniswap_v_pool_from_token_positions)
pool = {
@@ -318,7 +332,8 @@ def decode_positions_data(self, position_token_address, positions):
liquidity = positions.get("liquidity")
# some position has no fee, could use 0/-1
fee = positions.get("fee", 0)
- return token0, token1, tick_lower, tick_upper, liquidity, fee
+ tick_spacing = positions.get("tickSpacing", 0)
+ return token0, token1, tick_lower, tick_upper, liquidity, fee, tick_spacing
def get_existing_tokens(self):
session = self._service.get_service_session()
@@ -327,13 +342,12 @@ def get_existing_tokens(self):
).all()
session.close()
- # position_token_address_token_id_pool_address_dict = {
- # (bytes_to_hex_str(t.position_token_address), bytes_to_hex_str(t.token_id)): bytes_to_hex_str(t.pool_address)
- # for t in tokens_orm}
+ position_token_address_token_id_pool_address_dict = set()
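+ # despite the legacy name, this is a set of (position_token_address, token_id) tuples for O(1) membership checks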
- position_token_address_token_id_pool_address_dict = [
- (bytes_to_hex_str(t.position_token_address), t.token_id) for t in tokens_orm
- ]
+ for t in tokens_orm:
+ position_token_address_token_id_pool_address_dict.add(
+ (bytes_to_hex_str(t.position_token_address), t.token_id)
+ )
return position_token_address_token_id_pool_address_dict
diff --git a/indexer/modules/user_ops/domain/__init__.py b/hemera_udf/uniswap_v3/models/__init__.py
similarity index 100%
rename from indexer/modules/user_ops/domain/__init__.py
rename to hemera_udf/uniswap_v3/models/__init__.py
diff --git a/hemera_udf/uniswap_v3/models/af_teahouse_liquidity_current.py b/hemera_udf/uniswap_v3/models/af_teahouse_liquidity_current.py
new file mode 100644
index 000000000..3607820b4
--- /dev/null
+++ b/hemera_udf/uniswap_v3/models/af_teahouse_liquidity_current.py
@@ -0,0 +1,34 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import TeahouseLiquidityCurrent
+
+
+class AfTeahouseLiquidityCurrent(HemeraModel):
+ __tablename__ = "af_teahouse_liquidity_current"
+ position_token_address = Column(BYTEA, primary_key=True)
+ pool_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT)
+ block_timestamp = Column(BIGINT)
+
+ liquidity = Column(NUMERIC(100))
+ tick_lower = Column(NUMERIC(100))
+ tick_upper = Column(NUMERIC(100))
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (PrimaryKeyConstraint("position_token_address", "pool_address"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": TeahouseLiquidityCurrent,
+ "conflict_do_update": True,
+ "update_strategy": "EXCLUDED.block_number > af_teahouse_liquidity_current.block_number",
+ "converter": general_converter,
+ }
+ ]
diff --git a/hemera_udf/uniswap_v3/models/af_teahouse_liquidity_hist.py b/hemera_udf/uniswap_v3/models/af_teahouse_liquidity_hist.py
new file mode 100644
index 000000000..a081df860
--- /dev/null
+++ b/hemera_udf/uniswap_v3/models/af_teahouse_liquidity_hist.py
@@ -0,0 +1,36 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func, text
+from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import TeahouseLiquidityHist
+
+
+class AfTeahouseLiquidityHist(HemeraModel):
+ __tablename__ = "af_teahouse_liquidity_hist"
+ position_token_address = Column(BYTEA, primary_key=True)
+ pool_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(BIGINT, primary_key=True)
+
+ liquidity = Column(NUMERIC(100))
+ tick_lower = Column(NUMERIC(100))
+ tick_upper = Column(NUMERIC(100))
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+ reorg = Column(BOOLEAN, server_default=text("false"))
+
+ __table_args__ = (
+ PrimaryKeyConstraint("position_token_address", "pool_address", "block_timestamp", "block_number"),
+ )
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": TeahouseLiquidityHist,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ }
+ ]
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_collect_fee_records.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_collect_fee_records.py
similarity index 90%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_collect_fee_records.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_collect_fee_records.py
index bf0c91d40..ac7e908ef 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_collect_fee_records.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_collect_fee_records.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenCollectFee, UniswapV3TokenCollectFee
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenCollectFee, UniswapV3TokenCollectFee
class UniswapV3CollectFeeRecords(HemeraModel):
@@ -10,7 +10,7 @@ class UniswapV3CollectFeeRecords(HemeraModel):
position_token_address = Column(BYTEA, primary_key=True)
token_id = Column(NUMERIC(100), primary_key=True)
block_number = Column(BIGINT, primary_key=True)
- block_timestamp = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
log_index = Column(INTEGER, primary_key=True)
transaction_hash = Column(BYTEA)
owner = Column(BYTEA)
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_liquidity_records.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_liquidity_records.py
similarity index 89%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_liquidity_records.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_liquidity_records.py
index 11bf5e7b8..5ea693f30 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_liquidity_records.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_liquidity_records.py
@@ -1,11 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
- AgniV3TokenUpdateLiquidity,
- UniswapV3TokenUpdateLiquidity,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenUpdateLiquidity, UniswapV3TokenUpdateLiquidity
class UniswapV3TokenLiquidityRecords(HemeraModel):
@@ -13,7 +10,7 @@ class UniswapV3TokenLiquidityRecords(HemeraModel):
position_token_address = Column(BYTEA, primary_key=True)
token_id = Column(NUMERIC(100), primary_key=True)
block_number = Column(BIGINT, primary_key=True)
- block_timestamp = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
log_index = Column(INTEGER, primary_key=True)
transaction_hash = Column(BYTEA)
owner = Column(BYTEA)
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pool_current_prices.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_pool_current_prices.py
similarity index 80%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pool_current_prices.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_pool_current_prices.py
index 4cc20f59a..79ace3d86 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pool_current_prices.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_pool_current_prices.py
@@ -1,22 +1,21 @@
from sqlalchemy import Column, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
- AgniV3PoolCurrentPrice,
- UniswapV3PoolCurrentPrice,
-)
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3PoolCurrentPrice, UniswapV3PoolCurrentPrice
class UniswapV3PoolCurrentPrices(HemeraModel):
__tablename__ = "af_uniswap_v3_pool_prices_current"
pool_address = Column(BYTEA, primary_key=True)
block_number = Column(BIGINT)
- block_timestamp = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
factory_address = Column(BYTEA)
sqrt_price_x96 = Column(NUMERIC(100))
tick = Column(NUMERIC(100))
+ token0_price = Column(NUMERIC)
+ token1_price = Column(NUMERIC)
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pool_prices.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_pool_prices.py
similarity index 80%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pool_prices.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_pool_prices.py
index 672dda0db..cd53fc943 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pool_prices.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_pool_prices.py
@@ -1,18 +1,21 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import AgniV3PoolPrice, UniswapV3PoolPrice
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3PoolPrice, UniswapV3PoolPrice
class UniswapV3PoolPrices(HemeraModel):
__tablename__ = "af_uniswap_v3_pool_prices_hist"
pool_address = Column(BYTEA, primary_key=True)
block_number = Column(BIGINT, primary_key=True)
- block_timestamp = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
sqrt_price_x96 = Column(NUMERIC(100))
tick = Column(NUMERIC(100))
+ token0_price = Column(NUMERIC)
+ token1_price = Column(NUMERIC)
+
factory_address = Column(BYTEA)
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pools.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_pools.py
similarity index 90%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pools.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_pools.py
index b2306423d..078a45e23 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_pools.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_pools.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import (
UniswapV3Pool,
UniswapV3PoolFromSwapEvent,
UniswapV3PoolFromToken,
@@ -23,7 +23,7 @@ class UniswapV3Pools(HemeraModel):
tick_spacing = Column(NUMERIC(100))
block_number = Column(BIGINT)
- block_timestamp = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_swap_records.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_swap_records.py
similarity index 81%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_swap_records.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_swap_records.py
index fc8d65a16..4ec08d6c5 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_swap_records.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_swap_records.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, PrimaryKeyConstraint, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import AgniV3SwapEvent, UniswapV3SwapEvent
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3SwapEvent, UniswapV3SwapEvent
class UniswapV3PoolSwapRecords(HemeraModel):
@@ -11,7 +11,7 @@ class UniswapV3PoolSwapRecords(HemeraModel):
transaction_hash = Column(BYTEA, primary_key=True)
log_index = Column(INTEGER, primary_key=True)
block_number = Column(BIGINT)
- block_timestamp = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
position_token_address = Column(BYTEA)
transaction_from_address = Column(BYTEA)
sender = Column(BYTEA)
@@ -22,6 +22,9 @@ class UniswapV3PoolSwapRecords(HemeraModel):
sqrt_price_x96 = Column(NUMERIC(100))
amount0 = Column(NUMERIC(100))
amount1 = Column(NUMERIC(100))
+ token0_price = Column(NUMERIC)
+ token1_price = Column(NUMERIC)
+ amount_usd = Column(NUMERIC)
token0_address = Column(BYTEA)
token1_address = Column(BYTEA)
@@ -30,7 +33,7 @@ class UniswapV3PoolSwapRecords(HemeraModel):
update_time = Column(TIMESTAMP, server_default=func.now())
reorg = Column(BOOLEAN, server_default=text("false"))
- __table_args__ = (PrimaryKeyConstraint("pool_address", "transaction_hash", "log_index"),)
+ __table_args__ = (PrimaryKeyConstraint("pool_address", "transaction_hash", "log_index", "block_timestamp"),)
@staticmethod
def model_domain_mapping():
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_token_current_status.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_token_current_status.py
similarity index 85%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_token_current_status.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_token_current_status.py
index 1abd471d4..19b5b5d57 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_token_current_status.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_token_current_status.py
@@ -1,9 +1,9 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenCurrentStatus
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import (
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenCurrentStatus
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import (
UniswapV3TokenCurrentStatus as UniswapV3TokenCurrentStatusDomain,
)
@@ -14,7 +14,7 @@ class UniswapV3TokenCurrentStatus(HemeraModel):
position_token_address = Column(BYTEA, primary_key=True)
token_id = Column(NUMERIC(100), primary_key=True)
block_number = Column(BIGINT)
- block_timestamp = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
wallet_address = Column(BYTEA)
pool_address = Column(BYTEA)
liquidity = Column(NUMERIC(100))
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_token_details.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_token_details.py
similarity index 88%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_token_details.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_token_details.py
index 67f813488..6e9994711 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_token_details.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_token_details.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenDetail, UniswapV3TokenDetail
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3TokenDetail, UniswapV3TokenDetail
class UniswapV3TokenDetails(HemeraModel):
@@ -11,7 +11,7 @@ class UniswapV3TokenDetails(HemeraModel):
position_token_address = Column(BYTEA, primary_key=True)
token_id = Column(NUMERIC(100), primary_key=True)
block_number = Column(BIGINT, primary_key=True)
- block_timestamp = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
wallet_address = Column(BYTEA)
pool_address = Column(BYTEA)
liquidity = Column(NUMERIC(100))
diff --git a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_tokens.py b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_tokens.py
similarity index 86%
rename from indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_tokens.py
rename to hemera_udf/uniswap_v3/models/feature_uniswap_v3_tokens.py
index 49b72bbdf..66bdbade8 100644
--- a/indexer/modules/custom/uniswap_v3/models/feature_uniswap_v3_tokens.py
+++ b/hemera_udf/uniswap_v3/models/feature_uniswap_v3_tokens.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
-from common.models import HemeraModel, general_converter
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import AgniV3Token, UniswapV3Token
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v3.domains.feature_uniswap_v3 import AgniV3Token, UniswapV3Token
class UniswapV3Tokens(HemeraModel):
@@ -17,7 +17,7 @@ class UniswapV3Tokens(HemeraModel):
fee = Column(NUMERIC(100))
block_number = Column(BIGINT)
- block_timestamp = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
create_time = Column(TIMESTAMP, server_default=func.now())
update_time = Column(TIMESTAMP, server_default=func.now())
diff --git a/indexer/modules/custom/uniswap_v3/util.py b/hemera_udf/uniswap_v3/util.py
similarity index 71%
rename from indexer/modules/custom/uniswap_v3/util.py
rename to hemera_udf/uniswap_v3/util.py
index e1e5e4d20..390d39814 100644
--- a/indexer/modules/custom/uniswap_v3/util.py
+++ b/hemera_udf/uniswap_v3/util.py
@@ -1,10 +1,9 @@
import logging
-from web3 import Web3
-
-import indexer.modules.custom.uniswap_v3.agni_abi as agni_abi
-import indexer.modules.custom.uniswap_v3.swapsicle_abi as swapsicle_abi
-import indexer.modules.custom.uniswap_v3.uniswapv3_abi as uniswapv3_abi
+import hemera_udf.uniswap_v3.abi.agni_abi as agni_abi
+import hemera_udf.uniswap_v3.abi.swapsicle_abi as swapsicle_abi
+import hemera_udf.uniswap_v3.abi.uniswapv3_abi as uniswapv3_abi
+from hemera_udf.uniswap_v3.abi import aerodrome_abi
logger = logging.getLogger(__name__)
@@ -40,8 +39,8 @@ def __init__(self, jobs):
def _build_mappings(self, jobs):
for job in jobs:
type_str = job.get("type")
- factory_address = job.get("factory address")
- position_token_address = job.get("position_token_address")
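+ # lower-case the configured addresses so they match log.address lookups (log addresses are assumed lowercase hex)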
+ factory_address = job.get("factory_address").lower()
+ position_token_address = job.get("position_token_address").lower()
if factory_address not in self.factory_address_list:
self.factory_address_list.append(factory_address)
@@ -72,6 +71,7 @@ def _get_abi_module(self, type_str):
"uniswapv3": uniswapv3_abi,
"swapsicle": swapsicle_abi,
"agni": agni_abi,
+ "aerodrome": aerodrome_abi,
}
return abi_mapping.get(type_str)
@@ -94,31 +94,3 @@ def get_abi_by_position(self, position_token_address):
def get_type_str_by_position(self, position_token_address):
entry = self.position_to_factory.get(position_token_address)
return entry["type"] if entry else None
-
-
-# todo: remove
-def build_no_input_method_data(web3, requests, fn, abi_list, contract_address_key="pool_address"):
- parameters = []
-
- for idx, token in enumerate(requests):
- token["request_id"] = idx
- token_data = {
- "request_id": idx,
- "param_to": token[contract_address_key],
- "param_number": hex(token["block_number"]),
- }
- token.update(token_data)
- try:
- # Encode the ABI for the specific token_id
- token["param_data"] = web3.eth.contract(
- address=Web3.to_checksum_address(token[contract_address_key]), abi=abi_list
- ).encodeABI(fn_name=fn)
- except Exception as e:
- logger.error(
- f"Encoding for function {fn} failed. "
- f"Contract address: {token[contract_address_key]}. "
- f"Exception: {e}."
- )
-
- parameters.append(token)
- return parameters
diff --git a/hemera_udf/uniswap_v4/CHANGELOG.md b/hemera_udf/uniswap_v4/CHANGELOG.md
new file mode 100644
index 000000000..8595a7b45
--- /dev/null
+++ b/hemera_udf/uniswap_v4/CHANGELOG.md
@@ -0,0 +1,14 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [0.1.0] - 2023-06-01
+
+### Added
+- Initial implementation of Uniswap v4 indexer
+- Support for hooks in Uniswap v4
+- Pool creation and hook event tracking
+- Database models for Uniswap v4 entities
\ No newline at end of file
diff --git a/hemera_udf/uniswap_v4/__init__.py b/hemera_udf/uniswap_v4/__init__.py
new file mode 100644
index 000000000..dcd0c7ac6
--- /dev/null
+++ b/hemera_udf/uniswap_v4/__init__.py
@@ -0,0 +1,28 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import *
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+ raise RuntimeError(f"The package `hemera-modules-uniswap-v4:{__version__}` needs Hemera 1.0.0+")
+
+
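+# Register "UNISWAP_V4" as a dynamic entity type so the indexer can enable these output domains by name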
+value = DynamicEntityTypeRegistry.register("UNISWAP_V4")
+DynamicEntityTypeRegistry.register_output_types(
+ value,
+ {
+ UniswapV4Pool,
+ UniswapV4PoolPrice,
+ UniswapV4PoolCurrentPrice,
+ UniswapV4SwapEvent,
+ UniswapV4Hook,
+ },
+)
diff --git a/hemera_udf/uniswap_v4/abi/__init__.py b/hemera_udf/uniswap_v4/abi/__init__.py
new file mode 100644
index 000000000..de9ee2d45
--- /dev/null
+++ b/hemera_udf/uniswap_v4/abi/__init__.py
@@ -0,0 +1 @@
+from hemera_udf.uniswap_v4.abi import uniswapv4_abi
diff --git a/hemera_udf/uniswap_v4/abi/uniswapv4_abi.py b/hemera_udf/uniswap_v4/abi/uniswapv4_abi.py
new file mode 100644
index 000000000..e5819a585
--- /dev/null
+++ b/hemera_udf/uniswap_v4/abi/uniswapv4_abi.py
@@ -0,0 +1,309 @@
+from hemera.common.utils.abi_code_utils import Event, Function
+
+# In v4, ADDRESS_ZERO is used to represent native ETH
+ETH_ADDRESS = "0x0000000000000000000000000000000000000000"
+
+POSITIONS_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "uint256", "name": "tokenId", "type": "uint256"}],
+ "name": "positions",
+ "outputs": [
+ {"internalType": "uint96", "name": "nonce", "type": "uint96"},
+ {"internalType": "address", "name": "operator", "type": "address"},
+ {"internalType": "address", "name": "token0", "type": "address"},
+ {"internalType": "address", "name": "token1", "type": "address"},
+ {"internalType": "uint24", "name": "fee", "type": "uint24"},
+ {"internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ {"internalType": "uint256", "name": "feeGrowthInside0LastX128", "type": "uint256"},
+ {"internalType": "uint256", "name": "feeGrowthInside1LastX128", "type": "uint256"},
+ {"internalType": "uint128", "name": "tokensOwed0", "type": "uint128"},
+ {"internalType": "uint128", "name": "tokensOwed1", "type": "uint128"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+CURRENCY_IS_NATIVE_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "Currency", "name": "currency", "type": "address"}],
+ "name": "isNative",
+ "outputs": [{"internalType": "bool", "name": "", "type": "bool"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+CURRENCY_WRAP_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "address", "name": "currency", "type": "address"}],
+ "name": "wrap",
+ "outputs": [{"internalType": "Currency", "name": "", "type": "address"}],
+ "stateMutability": "pure",
+ "type": "function",
+ }
+)
+
+SETTLE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "settle",
+ "outputs": [{"internalType": "uint256", "name": "paid", "type": "uint256"}],
+ "stateMutability": "payable",
+ "type": "function",
+ }
+)
+
+TAKE_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "Currency", "name": "currency", "type": "address"},
+ {"internalType": "address", "name": "to", "type": "address"},
+ {"internalType": "uint256", "name": "amount", "type": "uint256"},
+ ],
+ "name": "take",
+ "outputs": [],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+
+UNLOCK_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "bytes", "name": "data", "type": "bytes"}],
+ "name": "unlock",
+ "outputs": [{"internalType": "bytes", "name": "", "type": "bytes"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+
+INITIALIZE_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "bytes32", "name": "id", "type": "bytes32"},
+ {"indexed": True, "internalType": "address", "name": "currency0", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "currency1", "type": "address"},
+ {"indexed": False, "internalType": "uint24", "name": "fee", "type": "uint24"},
+ {"indexed": False, "internalType": "int24", "name": "tickSpacing", "type": "int24"},
+ {"indexed": False, "internalType": "address", "name": "hooks", "type": "address"},
+ {"indexed": False, "internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
+ {"indexed": False, "internalType": "int24", "name": "tick", "type": "int24"},
+ ],
+ "name": "Initialize",
+ "type": "event",
+ }
+)
+
+CREATE_POOL_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "tokenA", "type": "address"},
+ {"internalType": "address", "name": "tokenB", "type": "address"},
+ {"internalType": "uint24", "name": "fee", "type": "uint24"},
+ {"internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
+ {"internalType": "address[]", "name": "hooks", "type": "address[]"},
+ {"internalType": "bytes[]", "name": "initData", "type": "bytes[]"},
+ ],
+ "name": "createPool",
+ "outputs": [{"internalType": "address", "name": "pool", "type": "address"}],
+ "stateMutability": "nonpayable",
+ "type": "function",
+ }
+)
+
+GET_POOL_FUNCTION = Function(
+ {
+ "inputs": [
+ {"internalType": "address", "name": "", "type": "address"},
+ {"internalType": "address", "name": "", "type": "address"},
+ {"internalType": "uint24", "name": "", "type": "uint24"},
+ ],
+ "name": "getPool",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+SLOT0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "slot0",
+ "outputs": [
+ {"internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
+ {"internalType": "int24", "name": "tick", "type": "int24"},
+ {"internalType": "uint16", "name": "observationIndex", "type": "uint16"},
+ {"internalType": "uint16", "name": "observationCardinality", "type": "uint16"},
+ {"internalType": "uint16", "name": "observationCardinalityNext", "type": "uint16"},
+ {"internalType": "uint8", "name": "feeProtocol", "type": "uint8"},
+ {"internalType": "bool", "name": "unlocked", "type": "bool"},
+ ],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+POOL_CREATED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "token0", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "token1", "type": "address"},
+ {"indexed": True, "internalType": "uint24", "name": "fee", "type": "uint24"},
+ {"indexed": False, "internalType": "int24", "name": "tickSpacing", "type": "int24"},
+ {"indexed": False, "internalType": "address", "name": "pool", "type": "address"},
+ {"indexed": False, "internalType": "address[]", "name": "hooks", "type": "address[]"},
+ ],
+ "name": "PoolCreated",
+ "type": "event",
+ }
+)
+
+SWAP_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "bytes32", "name": "id", "type": "bytes32"},
+ {"indexed": True, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": False, "internalType": "int128", "name": "amount0", "type": "int128"},
+ {"indexed": False, "internalType": "int128", "name": "amount1", "type": "int128"},
+ {"indexed": False, "internalType": "uint160", "name": "sqrtPriceX96", "type": "uint160"},
+ {"indexed": False, "internalType": "uint128", "name": "liquidity", "type": "uint128"},
+ {"indexed": False, "internalType": "int24", "name": "tick", "type": "int24"},
+ {"indexed": False, "internalType": "uint24", "name": "fee", "type": "uint24"},
+ ],
+ "name": "Swap",
+ "type": "event",
+ }
+)
+
+MODIFY_LIQUIDITY_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "bytes32", "name": "id", "type": "bytes32"},
+ {"indexed": True, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": False, "internalType": "int24", "name": "tickLower", "type": "int24"},
+ {"indexed": False, "internalType": "int24", "name": "tickUpper", "type": "int24"},
+ {"indexed": False, "internalType": "int256", "name": "liquidityDelta", "type": "int256"},
+ {"indexed": False, "internalType": "bytes32", "name": "salt", "type": "bytes32"},
+ ],
+ "name": "ModifyLiquidity",
+ "type": "event",
+ }
+)
+
+OWNER_OF_FUNCTION = Function(
+ {
+ "inputs": [{"internalType": "uint256", "name": "tokenId", "type": "uint256"}],
+ "name": "ownerOf",
+ "outputs": [{"internalType": "address", "name": "owner", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+FACTORY_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "factory",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+FEE_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "fee",
+ "outputs": [{"internalType": "uint24", "name": "", "type": "uint24"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+TOKEN0_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "token0",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+TOKEN1_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "token1",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+TICK_SPACING_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "tickSpacing",
+ "outputs": [{"internalType": "int24", "name": "", "type": "int24"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+HOOKS_FUNCTION = Function(
+ {
+ "inputs": [],
+ "name": "getHooks",
+ "outputs": [{"internalType": "address", "name": "", "type": "address"}],
+ "stateMutability": "view",
+ "type": "function",
+ }
+)
+
+DONATE_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "bytes32", "name": "id", "type": "bytes32"},
+ {"indexed": True, "internalType": "address", "name": "sender", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "amount0", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "amount1", "type": "uint256"},
+ ],
+ "name": "Donate",
+ "type": "event",
+ }
+)
+
+
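+# bytes4 function selectors of the v4 hook callbacks (beforeInitialize, afterSwap, ...)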
+BEFORE_INITIALIZE_SELECTOR = "0x60180975"
+AFTER_INITIALIZE_SELECTOR = "0x58c0bbb5"
+BEFORE_ADD_LIQUIDITY_SELECTOR = "0x826309f2"
+AFTER_ADD_LIQUIDITY_SELECTOR = "0x32553a29"
+BEFORE_REMOVE_LIQUIDITY_SELECTOR = "0xb58be774"
+AFTER_REMOVE_LIQUIDITY_SELECTOR = "0x571b684a"
+BEFORE_SWAP_SELECTOR = "0x00e91414"
+AFTER_SWAP_SELECTOR = "0x59b0f6e5"
+BEFORE_DONATE_SELECTOR = "0x7fec8241"
+AFTER_DONATE_SELECTOR = "0x5896b6e8"
+
+
+HOOK_CALLED_EVENT = Event(
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "hook", "type": "address"},
+ {"indexed": True, "internalType": "address", "name": "caller", "type": "address"},
+ {"indexed": False, "internalType": "bytes4", "name": "selector", "type": "bytes4"},
+ {"indexed": False, "internalType": "bytes", "name": "data", "type": "bytes"},
+ ],
+ "name": "HookCalled",
+ "type": "event",
+ }
+)
diff --git a/indexer/modules/user_ops/models/__init__.py b/hemera_udf/uniswap_v4/domains/__init__.py
similarity index 100%
rename from indexer/modules/user_ops/models/__init__.py
rename to hemera_udf/uniswap_v4/domains/__init__.py
diff --git a/hemera_udf/uniswap_v4/domains/feature_uniswap_v4.py b/hemera_udf/uniswap_v4/domains/feature_uniswap_v4.py
new file mode 100644
index 000000000..ba2121fdd
--- /dev/null
+++ b/hemera_udf/uniswap_v4/domains/feature_uniswap_v4.py
@@ -0,0 +1,76 @@
+from dataclasses import dataclass
+from typing import Optional
+
+from hemera.indexer.domains import Domain
+
+
+@dataclass
+class UniswapV4Pool(Domain):
+ position_token_address: str
+ factory_address: str
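+ # pool_address stores the v4 PoolId (bytes32 from the Initialize event); v4 pools live inside the PoolManager singleton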
+ pool_address: str
+ token0_address: str
+ token1_address: str
+ fee: int
+ tick_spacing: int
+ block_number: int
+ block_timestamp: int
+ hook_address: str
+
+
+@dataclass
+class UniswapV4PoolPrice(Domain):
+ factory_address: str
+ pool_address: str
+ sqrt_price_x96: int
+ tick: int
+ token0_price: float
+ token1_price: float
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class UniswapV4PoolCurrentPrice(Domain):
+ factory_address: str
+ pool_address: str
+ sqrt_price_x96: int
+ tick: int
+ token0_price: float
+ token1_price: float
+ block_number: int
+ block_timestamp: int
+
+
+@dataclass
+class UniswapV4SwapEvent(Domain):
+ pool_address: str
+ position_token_address: str
+ transaction_from_address: str
+ sender: str
+ recipient: str
+ amount0: int
+ amount1: int
+ token0_price: float
+ token1_price: float
+ amount_usd: float
+ liquidity: int
+ tick: int
+ sqrt_price_x96: int
+ token0_address: str
+ token1_address: str
+ transaction_hash: str
+ log_index: int
+ block_number: int
+ block_timestamp: int
+ hook_data: Optional[str] = None  # JSON string of hook-related data
+
+
+@dataclass
+class UniswapV4Hook(Domain):
+ hook_address: str
+ factory_address: str
+ pool_address: str
+ hook_type: str # e.g., "fee", "dynamic_fee", "limit_order", etc.
+ hook_data: str # JSON string of hook-specific data
+ block_number: int
+ block_timestamp: int
diff --git a/indexer/tests/bridge/__init__.py b/hemera_udf/uniswap_v4/endpoints/__init__.py
similarity index 100%
rename from indexer/tests/bridge/__init__.py
rename to hemera_udf/uniswap_v4/endpoints/__init__.py
diff --git a/indexer/tests/bridge/bedrock/__init__.py b/hemera_udf/uniswap_v4/jobs/__init__.py
similarity index 100%
rename from indexer/tests/bridge/bedrock/__init__.py
rename to hemera_udf/uniswap_v4/jobs/__init__.py
diff --git a/hemera_udf/uniswap_v4/jobs/uniswap_v4_pool_job.py b/hemera_udf/uniswap_v4/jobs/uniswap_v4_pool_job.py
new file mode 100644
index 000000000..02e57764f
--- /dev/null
+++ b/hemera_udf/uniswap_v4/jobs/uniswap_v4_pool_job.py
@@ -0,0 +1,117 @@
+import json
+import logging
+
+import hemera_udf.uniswap_v4.abi.uniswapv4_abi as uniswapv4_abi
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.log import Log
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import UniswapV4Hook, UniswapV4Pool
+from hemera_udf.uniswap_v4.models.feature_uniswap_v4_pools import UniswapV4Pools
+from hemera_udf.uniswap_v4.util import AddressManager
+
+logger = logging.getLogger(__name__)
+
+
+class ExportUniSwapV4PoolJob(FilterTransactionDataJob):
+ dependency_types = [Log]
+ output_types = [UniswapV4Pool, UniswapV4Hook]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self._service = kwargs["config"].get("db_service")
+ config = kwargs["config"]["uniswap_v4_job"]
+ self._address_manager = AddressManager(config.get("jobs", []))
+
+ def get_filter(self):
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ topics=[
+ abi_module.INITIALIZE_EVENT.get_signature()
+ for abi_module in self._address_manager.abi_modules_list
+ ],
+ addresses=self._address_manager.factory_address_list,
+ ),
+ ]
+ )
+
+ def _process(self, **kwargs):
+ self.get_pools()
+
+ def get_pools(self):
+ processed_count = 0
+
+ for log in self._data_buff[Log.type()]:
+ if log.topic0 != uniswapv4_abi.INITIALIZE_EVENT.get_signature():
+ continue
+
+ decoded_data = uniswapv4_abi.INITIALIZE_EVENT.decode_log(log)
+ pool_id = bytes_to_hex_str(decoded_data["id"])
+ if not pool_id:
+ logger.warning(f"Pool ID not found for factory {log.address}, tx hash: {log.transaction_hash}")
+ continue
+
+ position_token_address = self._address_manager.get_position_by_factory(log.address)
+ hook_address = decoded_data["hooks"]
+
+ uniswap_v4_pool = UniswapV4Pool(
+ factory_address=log.address,
+ position_token_address=position_token_address,
+ token0_address=decoded_data["currency0"],
+ token1_address=decoded_data["currency1"],
+ fee=decoded_data["fee"],
+ tick_spacing=decoded_data["tickSpacing"],
+ pool_address=pool_id,
+ hook_address=hook_address,
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ )
+ self._collect_domain(uniswap_v4_pool)
+ processed_count += 1
+
+ if hook_address and hook_address != "0x0000000000000000000000000000000000000000":
+ hook_type = self.determine_hook_type(hook_address)
+ self._collect_domain(
+ UniswapV4Hook(
+ hook_address=hook_address,
+ factory_address=log.address,
+ pool_address=pool_id,
+ hook_type=hook_type,
+ hook_data=json.dumps({"hook_type": hook_type}),
+ block_number=log.block_number,
+ block_timestamp=log.block_timestamp,
+ )
+ )
+
+ logger.info(f"Processed {processed_count} pools")
+
+ def determine_hook_type(self, hook_address):
+ # Convert hook address to integer
+ addr_int = int(hook_address, 16)
+ permissions = []
+
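+ # Uniswap v4 encodes a hook's permissions in the low-order bits of its contract address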
+ permission_flags = {
+ 13: "before_initialize",
+ 12: "after_initialize",
+ 11: "before_add_liquidity",
+ 10: "after_add_liquidity",
+ 9: "before_remove_liquidity",
+ 8: "after_remove_liquidity",
+ 7: "before_swap",
+ 6: "after_swap",
+ 5: "before_donate",
+ 4: "after_donate",
+ }
+
+ for bit, name in permission_flags.items():
+ if addr_int & (1 << bit):
+ permissions.append(name)
+
+ # Common hook types based on permissions
+ if "before_swap" in permissions and "after_swap" in permissions:
+ if addr_int & (1 << 3): # beforeSwapReturnsDelta flag
+ return "fee_hook"
+
+ return "unknown"
diff --git a/hemera_udf/uniswap_v4/jobs/uniswap_v4_pool_price_job.py b/hemera_udf/uniswap_v4/jobs/uniswap_v4_pool_price_job.py
new file mode 100644
index 000000000..b7557e917
--- /dev/null
+++ b/hemera_udf/uniswap_v4/jobs/uniswap_v4_pool_price_job.py
@@ -0,0 +1,209 @@
+import json
+import logging
+
+import hemera_udf.uniswap_v4.abi.uniswapv4_abi as uniswapv4_abi
+from hemera.common.utils.format_utils import bytes_to_hex_str
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera_udf.token_price.domains import BlockTokenPrice
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import (
+ UniswapV4Pool,
+ UniswapV4PoolCurrentPrice,
+ UniswapV4PoolPrice,
+ UniswapV4SwapEvent,
+)
+from hemera_udf.uniswap_v4.models.feature_uniswap_v4_pools import UniswapV4Pools
+from hemera_udf.uniswap_v4.util import AddressManager
+
+logger = logging.getLogger(__name__)
+
+
+class ExportUniSwapV4PoolPriceJob(FilterTransactionDataJob):
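+    """Track Uniswap v4 pool prices and swap history from Swap events."""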
+ dependency_types = [Transaction, BlockTokenPrice, UniswapV4Pool]
+ output_types = [UniswapV4PoolPrice, UniswapV4PoolCurrentPrice, UniswapV4SwapEvent]
+ able_to_reorg = True
+
+ def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self._service = kwargs["config"].get("db_service")
+        config = kwargs["config"]["uniswap_v4_job"]
+ jobs = config.get("jobs", [])
+ self._pool_address = config.get("pool_address")
+ self._address_manager = AddressManager(jobs)
+
+ # WETH address and native ETH address
+ self.weth_address = config.get("weth_address", "").lower()
+ self.eth_address = uniswapv4_abi.ETH_ADDRESS
+
+ self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
+ self.pools_requested_by_rpc = set()
+
+        # Stable tokens (address -> symbol), taken from the export_block_token_price_job config.
+        self.stable_tokens = kwargs["config"].get("export_block_token_price_job", {})
+
+        # Known pools loaded from the database, keyed by pool id; extended with
+        # newly indexed pools in _process.
+        self._exist_pools = self.get_existing_pools()
+        # self.tokens (token address -> metadata such as decimals) is expected to
+        # be populated by the base job; alias native ETH to the WETH metadata.
+        self.tokens[self.eth_address] = {**self.tokens.get(self.weth_address, {}), "symbol": "ETH"}
+
+ def get_filter(self):
+ address_list = self._pool_address if self._pool_address else []
+
+ return TransactionFilterByLogs(
+ [
+ TopicSpecification(
+ topics=[uniswapv4_abi.SWAP_EVENT.get_signature()],
+ addresses=address_list,
+ ),
+ ]
+ )
+
+ def change_block_token_prices_to_dict(self):
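+        # Index the block-level USD prices of the configured stable tokens by
+        # (token address, block number) for lookup while pricing swaps.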
+ symbol_address_dict = {symbol: address for address, symbol in self.stable_tokens.items()}
+ token_prices_dict = {}
+
+ block_token_prices = self._data_buff[BlockTokenPrice.type()]
+ for token_price in block_token_prices:
+ address = symbol_address_dict.get(token_price.token_symbol)
+ if address:
+ block_number = token_price.block_number
+ token_prices_dict[address, block_number] = token_price.token_price
+
+ return token_prices_dict
+
+ def _process(self, **kwargs):
+ token_prices_dict = self.change_block_token_prices_to_dict()
+
+ pools = self._data_buff[UniswapV4Pool.type()]
+ for pool in pools:
+ self._exist_pools[pool.pool_address.lower()] = {
+ "factory_address": pool.factory_address,
+ "token0_address": pool.token0_address,
+ "token1_address": pool.token1_address,
+ "position_token_address": pool.position_token_address,
+ }
+
+        transactions = self._data_buff[Transaction.type()]
+ current_price_dict = {}
+ price_dict = {}
+
+ for transaction in transactions:
+ logs = transaction.receipt.logs
+ for log in logs:
+ if log.topic0 != uniswapv4_abi.SWAP_EVENT.get_signature():
+ continue
+ decoded_data = uniswapv4_abi.SWAP_EVENT.decode_log(log)
+ pool_id = bytes_to_hex_str(decoded_data["id"]).lower()
+
+ # Check if this pool is in our known pools
+ if pool_id in self._exist_pools:
+ pool_data = self._exist_pools[pool_id].copy()
+ factory_address = pool_data.pop("factory_address")
+ key_data_dict = {
+ "tick": decoded_data["tick"],
+ "sqrt_price_x96": decoded_data["sqrtPriceX96"],
+ "block_number": log.block_number,
+ "block_timestamp": log.block_timestamp,
+ "pool_address": pool_id, # Use pool_id as identifier
+ }
+
+ token0_address = pool_data.get("token0_address")
+ token1_address = pool_data.get("token1_address")
+
+ tokens0 = self.tokens.get(token0_address)
+ tokens1 = self.tokens.get(token1_address)
+
+ decimals0 = tokens0.get("decimals") if tokens0 else None
+ decimals1 = tokens1.get("decimals") if tokens1 else None
+
+ amount0 = decoded_data["amount0"]
+ amount1 = decoded_data["amount1"]
+
+ amount0_abs = abs(amount0)
+ amount1_abs = abs(amount1)
+
+                    decimals_known = decimals0 is not None and decimals1 is not None
+
+ # Price calculation logic, similar to v3
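+                    # Value the stable-token leg with its block-level USD price, then
+                    # derive the other token's USD price from the swap's amount ratio;
+                    # pools without a stable-token leg are left unpriced.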
+                    if token0_address in self.stable_tokens and decimals_known:
+ token0_price = token_prices_dict.get((token0_address, log.block_number))
+ amount_usd = amount0_abs / 10**decimals0 * token0_price if token0_price else None
+ token1_price = (
+ amount_usd / (amount1_abs / 10**decimals1) if amount1_abs > 0 and amount_usd else None
+ )
+                    elif token1_address in self.stable_tokens and decimals_known:
+ token1_price = token_prices_dict.get((token1_address, log.block_number))
+ amount_usd = amount1_abs / 10**decimals1 * token1_price if token1_price else None
+ token0_price = (
+ amount_usd / (amount0_abs / 10**decimals0) if amount0_abs > 0 and amount_usd else None
+ )
+ else:
+ token0_price = None
+ token1_price = None
+ amount_usd = None
+
+ # Create price record
+ pool_price_item = UniswapV4PoolPrice(
+ **key_data_dict,
+ factory_address=factory_address,
+ token0_price=token0_price,
+ token1_price=token1_price,
+ )
+ price_dict[pool_id, log.block_number] = pool_price_item
+ current_price_dict[pool_id] = UniswapV4PoolCurrentPrice(**vars(pool_price_item))
+
+ # Create swap event record
+ self._collect_domain(
+ UniswapV4SwapEvent(
+ transaction_hash=log.transaction_hash,
+ transaction_from_address=transaction.from_address,
+ log_index=log.log_index,
+ sender=decoded_data["sender"],
+ recipient=None, # v4 SWAP event doesn't include recipient
+ amount0=amount0,
+ amount1=amount1,
+ liquidity=decoded_data["liquidity"],
+ **key_data_dict,
+ **pool_data,
+ token0_price=token0_price,
+ token1_price=token1_price,
+ amount_usd=amount_usd,
+ hook_data=None, # May need to process hookData here
+ ),
+ )
+
+ # Collect all price records
+ self._collect_domains(price_dict.values())
+ self._collect_domains(list(current_price_dict.values()))
+
+ def get_existing_pools(self):
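+        """Load known v4 pools from the database, returning an empty dict on failure."""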
+ session = self._service.Session()
+ try:
+ pools_orm = session.query(UniswapV4Pools).all()
+ existing_pools = {
+ bytes_to_hex_str(p.pool_address).lower(): {
+ "token0_address": bytes_to_hex_str(p.token0_address),
+ "token1_address": bytes_to_hex_str(p.token1_address),
+ "position_token_address": bytes_to_hex_str(p.position_token_address),
+ "factory_address": bytes_to_hex_str(p.factory_address),
+ }
+ for p in pools_orm
+ }
+ except Exception as e:
+ logger.error(f"Failed to get existing pools: {e}")
+ existing_pools = {}
+ finally:
+ session.close()
+
+ return existing_pools
diff --git a/indexer/tests/bridge/bedrock/parser/function_parser/__init__.py b/hemera_udf/uniswap_v4/models/__init__.py
similarity index 100%
rename from indexer/tests/bridge/bedrock/parser/function_parser/__init__.py
rename to hemera_udf/uniswap_v4/models/__init__.py
diff --git a/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pool_current_prices.py b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pool_current_prices.py
new file mode 100644
index 000000000..da414bcaa
--- /dev/null
+++ b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pool_current_prices.py
@@ -0,0 +1,37 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import UniswapV4PoolCurrentPrice
+
+
+class UniswapV4PoolCurrentPrices(HemeraModel):
+ __tablename__ = "af_uniswap_v4_pool_current_prices"
+ pool_address = Column(BYTEA, primary_key=True)
+
+ factory_address = Column(BYTEA)
+
+ sqrt_price_x96 = Column(NUMERIC(100))
+ tick = Column(NUMERIC(100))
+
+ token0_price = Column(NUMERIC(100))
+ token1_price = Column(NUMERIC(100))
+
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("pool_address"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": UniswapV4PoolCurrentPrice,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
diff --git a/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pool_prices.py b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pool_prices.py
new file mode 100644
index 000000000..424ecdb25
--- /dev/null
+++ b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pool_prices.py
@@ -0,0 +1,36 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import UniswapV4PoolPrice
+
+
+class UniswapV4PoolPrices(HemeraModel):
+ __tablename__ = "af_uniswap_v4_pool_prices"
+ pool_address = Column(BYTEA, primary_key=True)
+ block_number = Column(BIGINT, primary_key=True)
+ block_timestamp = Column(TIMESTAMP, primary_key=True)
+
+ factory_address = Column(BYTEA)
+
+ sqrt_price_x96 = Column(NUMERIC(100))
+ tick = Column(NUMERIC(100))
+
+ token0_price = Column(NUMERIC(100))
+ token1_price = Column(NUMERIC(100))
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("pool_address", "block_number", "block_timestamp"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": UniswapV4PoolPrice,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
diff --git a/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pools.py b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pools.py
new file mode 100644
index 000000000..393219b5e
--- /dev/null
+++ b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_pools.py
@@ -0,0 +1,39 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import UniswapV4Pool
+
+
+class UniswapV4Pools(HemeraModel):
+ __tablename__ = "af_uniswap_v4_pools"
+ position_token_address = Column(BYTEA, primary_key=True)
+ pool_address = Column(BYTEA, primary_key=True)
+
+ factory_address = Column(BYTEA)
+
+ token0_address = Column(BYTEA)
+ token1_address = Column(BYTEA)
+ fee = Column(NUMERIC(100))
+
+ tick_spacing = Column(NUMERIC(100))
+ hook_address = Column(BYTEA)
+
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("position_token_address", "pool_address"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": UniswapV4Pool,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
diff --git a/hemera_udf/uniswap_v4/models/feature_uniswap_v4_swap_records.py b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_swap_records.py
new file mode 100644
index 000000000..ce5705f63
--- /dev/null
+++ b/hemera_udf/uniswap_v4/models/feature_uniswap_v4_swap_records.py
@@ -0,0 +1,50 @@
+from sqlalchemy import Column, PrimaryKeyConstraint, func
+from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, NUMERIC, TEXT, TIMESTAMP
+
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.uniswap_v4.domains.feature_uniswap_v4 import UniswapV4SwapEvent
+
+
+class UniswapV4SwapRecords(HemeraModel):
+ __tablename__ = "af_uniswap_v4_swap_hist"
+ pool_address = Column(BYTEA, primary_key=True)
+ transaction_hash = Column(BYTEA, primary_key=True)
+ log_index = Column(BIGINT, primary_key=True)
+
+ position_token_address = Column(BYTEA)
+ transaction_from_address = Column(BYTEA)
+ sender = Column(BYTEA)
+ recipient = Column(BYTEA)
+
+ amount0 = Column(NUMERIC(100))
+ amount1 = Column(NUMERIC(100))
+ token0_price = Column(NUMERIC(100))
+ token1_price = Column(NUMERIC(100))
+ amount_usd = Column(NUMERIC(100))
+
+ liquidity = Column(NUMERIC(100))
+ tick = Column(NUMERIC(100))
+ sqrt_price_x96 = Column(NUMERIC(100))
+
+ token0_address = Column(BYTEA)
+ token1_address = Column(BYTEA)
+ hook_data = Column(TEXT) # JSON string of hook-related data
+
+ block_number = Column(BIGINT)
+ block_timestamp = Column(TIMESTAMP)
+
+ create_time = Column(TIMESTAMP, server_default=func.now())
+ update_time = Column(TIMESTAMP, server_default=func.now())
+
+ __table_args__ = (PrimaryKeyConstraint("pool_address", "transaction_hash", "log_index"),)
+
+ @staticmethod
+ def model_domain_mapping():
+ return [
+ {
+ "domain": UniswapV4SwapEvent,
+ "conflict_do_update": True,
+ "update_strategy": None,
+ "converter": general_converter,
+ },
+ ]
diff --git a/hemera_udf/uniswap_v4/sql/uniswap_v4.sql b/hemera_udf/uniswap_v4/sql/uniswap_v4.sql
new file mode 100644
index 000000000..efd2d5bab
--- /dev/null
+++ b/hemera_udf/uniswap_v4/sql/uniswap_v4.sql
@@ -0,0 +1,153 @@
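+-- DDL for the Uniswap v4 feature tables.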
+CREATE TABLE af_uniswap_v4_pools (
+ position_token_address BYTEA NOT NULL,
+ pool_address BYTEA NOT NULL,
+ factory_address BYTEA,
+ token0_address BYTEA,
+ token1_address BYTEA,
+ fee NUMERIC(100),
+ tick_spacing NUMERIC(100),
+ hook_address BYTEA,
+ block_number BIGINT,
+ block_timestamp TIMESTAMP,
+ create_time TIMESTAMP DEFAULT NOW(),
+ update_time TIMESTAMP DEFAULT NOW(),
+ PRIMARY KEY (position_token_address, pool_address)
+);
+
+CREATE TABLE af_uniswap_v4_hooks (
+ hook_address BYTEA NOT NULL,
+ pool_address BYTEA NOT NULL,
+ factory_address BYTEA,
+ hook_type TEXT, -- e.g., "fee", "dynamic_fee", "limit_order", etc.
+ hook_data TEXT, -- JSON string of hook-specific data
+ block_number BIGINT,
+ block_timestamp TIMESTAMP,
+ create_time TIMESTAMP DEFAULT NOW(),
+ update_time TIMESTAMP DEFAULT NOW(),
+ PRIMARY KEY (hook_address, pool_address)
+);
+
+
+-- Swaps table to track swap events
+CREATE TABLE af_uniswap_v4_swaps (
+ tx_hash BYTEA NOT NULL,
+ log_index BIGINT NOT NULL,
+ pool_address BYTEA NOT NULL,
+ sender_address BYTEA NOT NULL,
+ recipient_address BYTEA NOT NULL,
+ token0_delta NUMERIC(100),
+ token1_delta NUMERIC(100),
+ sqrt_price_x96_before NUMERIC(100),
+ sqrt_price_x96_after NUMERIC(100),
+ liquidity_before NUMERIC(100),
+ liquidity_after NUMERIC(100),
+ tick_before BIGINT,
+ tick_after BIGINT,
+ hook_data TEXT, -- JSON string of hook-specific data for the swap
+ block_number BIGINT,
+ block_timestamp TIMESTAMP,
+ create_time TIMESTAMP DEFAULT NOW(),
+ PRIMARY KEY (tx_hash, log_index)
+);
+
+-- Liquidity changes table to track mint/burn events
+CREATE TABLE af_uniswap_v4_liquidity_changes (
+ tx_hash BYTEA NOT NULL,
+ log_index BIGINT NOT NULL,
+ pool_address BYTEA NOT NULL,
+ position_id BYTEA,
+ sender_address BYTEA NOT NULL,
+ recipient_address BYTEA NOT NULL,
+ lower_tick BIGINT,
+ upper_tick BIGINT,
+ delta_liquidity NUMERIC(100),
+ token0_delta NUMERIC(100),
+ token1_delta NUMERIC(100),
+ event_type TEXT NOT NULL, -- 'mint' or 'burn'
+ block_number BIGINT,
+ block_timestamp TIMESTAMP,
+ create_time TIMESTAMP DEFAULT NOW(),
+ PRIMARY KEY (tx_hash, log_index)
+);
+
+
+
+-- Pool current prices (kept in sync with the UniswapV4PoolCurrentPrices model)
+CREATE TABLE af_uniswap_v4_pool_current_prices (
+    pool_address BYTEA NOT NULL PRIMARY KEY,
+    factory_address BYTEA,
+    sqrt_price_x96 NUMERIC(100),
+    tick NUMERIC(100),
+    token0_price NUMERIC(100), -- USD price of token0
+    token1_price NUMERIC(100), -- USD price of token1
+    block_number BIGINT,
+    block_timestamp TIMESTAMP,
+    create_time TIMESTAMP DEFAULT NOW(),
+    update_time TIMESTAMP DEFAULT NOW()
+);
+
+-- Pool price history (kept in sync with the UniswapV4PoolPrices model)
+CREATE TABLE af_uniswap_v4_pool_prices (
+    pool_address BYTEA NOT NULL,
+    block_number BIGINT NOT NULL,
+    block_timestamp TIMESTAMP NOT NULL,
+    factory_address BYTEA,
+    sqrt_price_x96 NUMERIC(100),
+    tick NUMERIC(100),
+    token0_price NUMERIC(100), -- USD price of token0
+    token1_price NUMERIC(100), -- USD price of token1
+    create_time TIMESTAMP DEFAULT NOW(),
+    update_time TIMESTAMP DEFAULT NOW(),
+    PRIMARY KEY (pool_address, block_number, block_timestamp)
+);
+
+
+CREATE TABLE af_uniswap_v4_swap_hist(
+ pool_address BYTEA NOT NULL,
+ transaction_hash BYTEA NOT NULL,
+ log_index BIGINT NOT NULL,
+ position_token_address BYTEA,
+ transaction_from_address BYTEA,
+ sender BYTEA,
+ recipient BYTEA,
+ amount0 NUMERIC(100),
+ amount1 NUMERIC(100),
+ token0_price NUMERIC(100),
+ token1_price NUMERIC(100),
+ amount_usd NUMERIC(100),
+ liquidity NUMERIC(100),
+ tick NUMERIC(100),
+ sqrt_price_x96 NUMERIC(100),
+ token0_address BYTEA,
+ token1_address BYTEA,
+ hook_data TEXT, -- JSON string of hook-related data
+ block_number BIGINT,
+ block_timestamp TIMESTAMP,
+ create_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ update_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ PRIMARY KEY (pool_address, transaction_hash, log_index)
+);
+
+CREATE INDEX idx_af_uniswap_v4_pools_pool_address ON af_uniswap_v4_pools(pool_address);
+CREATE INDEX idx_af_uniswap_v4_pools_factory_address ON af_uniswap_v4_pools(factory_address);
+CREATE INDEX idx_af_uniswap_v4_pools_token0_address ON af_uniswap_v4_pools(token0_address);
+CREATE INDEX idx_af_uniswap_v4_pools_token1_address ON af_uniswap_v4_pools(token1_address);
+
+CREATE INDEX idx_af_uniswap_v4_hooks_hook_address ON af_uniswap_v4_hooks(hook_address);
+CREATE INDEX idx_af_uniswap_v4_hooks_factory_address ON af_uniswap_v4_hooks(factory_address);
+
+-- New indices for additional tables
+
+CREATE INDEX idx_af_uniswap_v4_swaps_pool_address ON af_uniswap_v4_swaps(pool_address);
+CREATE INDEX idx_af_uniswap_v4_swaps_block_timestamp ON af_uniswap_v4_swaps(block_timestamp);
+
+
+-- Indices for pool price tables
+CREATE INDEX idx_af_uniswap_v4_pool_prices_block_timestamp ON af_uniswap_v4_pool_prices(block_timestamp);
+
+-- Indices for swap history
+CREATE INDEX idx_af_uniswap_v4_swap_hist_pool_address ON af_uniswap_v4_swap_hist(pool_address);
+CREATE INDEX idx_af_uniswap_v4_swap_hist_token0_address ON af_uniswap_v4_swap_hist(token0_address);
+CREATE INDEX idx_af_uniswap_v4_swap_hist_token1_address ON af_uniswap_v4_swap_hist(token1_address);
+CREATE INDEX idx_af_uniswap_v4_swap_hist_block_timestamp ON af_uniswap_v4_swap_hist(block_timestamp);
diff --git a/hemera_udf/uniswap_v4/util.py b/hemera_udf/uniswap_v4/util.py
new file mode 100644
index 000000000..9228d581f
--- /dev/null
+++ b/hemera_udf/uniswap_v4/util.py
@@ -0,0 +1,82 @@
+import logging
+
+import hemera_udf.uniswap_v4.abi.uniswapv4_abi as uniswapv4_abi
+
+logger = logging.getLogger(__name__)
+
+
+class AddressManager:
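+    """Resolve factory, position-token, and ABI-module relationships for configured v4 deployments."""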
+ def __init__(self, jobs):
+ # for filter topics
+ self.abi_modules_list = []
+ self.factory_address_list = []
+ self.position_token_address_list = []
+
+ self.factory_to_position = {}
+ self.position_to_factory = {}
+
+ self._build_mappings(jobs)
+
+    def _build_mappings(self, jobs):
+        for job in jobs:
+            type_str = job.get("type")
+            factory_address = (job.get("factory_address") or "").lower()
+            position_token_address = (job.get("position_token_address") or "").lower()
+            state_view_address = job.get("state_view_address", "").lower() if job.get("state_view_address") else None
+
+            abi_module = self._get_abi_module(type_str)
+            # Validate before registering anything so a malformed job entry fails fast.
+            if not factory_address or not position_token_address or not abi_module:
+                raise ValueError("Factory address, position token address, and ABI module are required")
+
+            if factory_address not in self.factory_address_list:
+                self.factory_address_list.append(factory_address)
+
+            if position_token_address not in self.position_token_address_list:
+                self.position_token_address_list.append(position_token_address)
+
+            if abi_module not in self.abi_modules_list:
+                self.abi_modules_list.append(abi_module)
+
+            entry = {
+                "position_token_address": position_token_address,
+                "factory_address": factory_address,
+                "type": type_str,
+                "abi_module": abi_module,
+                "state_view_address": state_view_address,
+            }
+
+            self.factory_to_position[factory_address] = entry
+            self.position_to_factory[position_token_address] = entry
+
+ def _get_abi_module(self, type_str):
+ return {"uniswapv4": uniswapv4_abi}.get(type_str)
+
+ def get_position_by_factory(self, factory_address):
+ entry = self.factory_to_position.get(factory_address)
+ return entry.get("position_token_address") if entry else None
+
+ def get_factory_by_position(self, position_token_address):
+ entry = self.position_to_factory.get(position_token_address)
+ return entry.get("factory_address") if entry else None
+
+ def get_abi_by_factory(self, factory_address):
+ entry = self.factory_to_position.get(factory_address)
+ return entry.get("abi_module") if entry else None
+
+ def get_abi_by_position(self, position_token_address):
+ entry = self.position_to_factory.get(position_token_address)
+ return entry.get("abi_module") if entry else None
+
+ def get_type_str_by_position(self, position_token_address):
+ entry = self.position_to_factory.get(position_token_address)
+ return entry.get("type") if entry else None
+
+ def get_state_view_address(self, factory_address):
+ """
+ Get the StateView contract address for a factory.
+ The StateView contract provides utility functions to query pool state.
+ """
+ entry = self.factory_to_position.get(factory_address)
+ return entry.get("state_view_address") if entry else None
diff --git a/hemera_udf/user_ops/__init__.py b/hemera_udf/user_ops/__init__.py
new file mode 100644
index 000000000..7b4eb4e7b
--- /dev/null
+++ b/hemera_udf/user_ops/__init__.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+import packaging.version
+
+from hemera import __version__ as hemera_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+from hemera.common.enumeration.entity_type import DynamicEntityTypeRegistry
+from hemera_udf.user_ops.domains import UserOperationsResult
+
+if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"):
+    raise RuntimeError(f"The package `{__name__}:{__version__}` requires Hemera 1.0.0+")
+
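+# Register a USER_OPS entity type so this UDF's output domains can be selected at runtime.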
+value = DynamicEntityTypeRegistry.register("USER_OPS")
+DynamicEntityTypeRegistry.register_output_types(value, {UserOperationsResult})
diff --git a/indexer/modules/user_ops/domain/user_operations.py b/hemera_udf/user_ops/domains.py
similarity index 91%
rename from indexer/modules/user_ops/domain/user_operations.py
rename to hemera_udf/user_ops/domains.py
index f004da559..be042653a 100644
--- a/indexer/modules/user_ops/domain/user_operations.py
+++ b/hemera_udf/user_ops/domains.py
@@ -1,11 +1,11 @@
from dataclasses import dataclass
from typing import Optional

-from indexer.domain import FilterData
+from hemera.indexer.domains import Domain


@dataclass
-class UserOperationsResult(FilterData):
+class UserOperationsResult(Domain):
user_op_hash: str
sender: Optional[str]
paymaster: Optional[str]
diff --git a/hemera_udf/user_ops/jobs/__init__.py b/hemera_udf/user_ops/jobs/__init__.py
new file mode 100644
index 000000000..1d2c6c40f
--- /dev/null
+++ b/hemera_udf/user_ops/jobs/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 17:10
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/modules/user_ops/export_user_ops_job.py b/hemera_udf/user_ops/jobs/export_user_ops_job.py
similarity index 95%
rename from indexer/modules/user_ops/export_user_ops_job.py
rename to hemera_udf/user_ops/jobs/export_user_ops_job.py
index 94e663572..2c63d0d6c 100644
--- a/indexer/modules/user_ops/export_user_ops_job.py
+++ b/hemera_udf/user_ops/jobs/export_user_ops_job.py
@@ -5,12 +5,12 @@
from web3._utils.normalizers import BASE_RETURN_NORMALIZERS
from web3.types import ABIEvent, ABIFunction

-from common.utils.abi_code_utils import decode_log
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.user_ops.domain.user_operations import UserOperationsResult
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera.common.utils.abi_code_utils import decode_log
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.jobs import FilterTransactionDataJob
+from hemera.indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
+from hemera_udf.user_ops.domains import UserOperationsResult

CONTRACT_ADDRESS = "0x5ff137d4b0fdcd49dca30c7cf57e578a026d2789"
BEFOREEXECUTION_FUNCTION_SIGN = "0xbb47ee3e183a558b1a2ff0874b079f3fc5478b7454eacf2bfc5af2ff5878f972"
diff --git a/indexer/tests/day_mining/__init__.py b/hemera_udf/user_ops/models/__init__.py
similarity index 100%
rename from indexer/tests/day_mining/__init__.py
rename to hemera_udf/user_ops/models/__init__.py
diff --git a/indexer/modules/user_ops/models/user_operation_results.py b/hemera_udf/user_ops/models/user_operation_results.py
similarity index 91%
rename from indexer/modules/user_ops/models/user_operation_results.py
rename to hemera_udf/user_ops/models/user_operation_results.py
index afd0b3a89..fdd0332da 100644
--- a/indexer/modules/user_ops/models/user_operation_results.py
+++ b/hemera_udf/user_ops/models/user_operation_results.py
@@ -1,8 +1,8 @@
from sqlalchemy import Column, Index
from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP, VARCHAR

-from common.models import HemeraModel, general_converter
-from indexer.modules.user_ops.domain.user_operations import UserOperationsResult
+from hemera.common.models import HemeraModel, general_converter
+from hemera_udf.user_ops.domains import UserOperationsResult


class UserOperationResult(HemeraModel):
diff --git a/indexer/aggr_jobs/aggr_base_job.py b/indexer/aggr_jobs/aggr_base_job.py
deleted file mode 100644
index 6efde17a4..000000000
--- a/indexer/aggr_jobs/aggr_base_job.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-from datetime import datetime, timedelta
-
-
-class AggrBaseJob:
- sql_folder = ""
-
- def run(self, **kwargs):
- pass
-
- def get_sql_content(self, file_name, start_date, end_date):
- base_dir = os.path.dirname(__file__)
- if not file_name.endswith(".sql"):
- file_name += ".sql"
- file_path = os.path.join(base_dir, self.sql_folder, file_name)
-
- with open(file_path, "r") as f:
- sql_template = f.read()
- sql = sql_template.format(start_date=start_date, end_date=end_date)
- return sql
-
- @staticmethod
- def generate_date_pairs(start_date, end_date):
- start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
- end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
-
- date_pairs = []
- current_date = start_date_obj
- while current_date < end_date_obj:
- next_date = current_date + timedelta(days=1)
- if next_date <= end_date_obj:
- date_pairs.append((current_date.strftime("%Y-%m-%d"), next_date.strftime("%Y-%m-%d")))
- current_date = next_date
-
- return date_pairs
diff --git a/indexer/aggr_jobs/aggr_job_scheduler.py b/indexer/aggr_jobs/aggr_job_scheduler.py
deleted file mode 100644
index 54b490d8c..000000000
--- a/indexer/aggr_jobs/aggr_job_scheduler.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""
-This scheduler can handle complex relationship dependencies, etc. The current example shows
-AggrDisorderJob -> AggrOrderJob
-"""
-
-from indexer.aggr_jobs.disorder_jobs.disorder_job import AggrDisorderJob
-from indexer.aggr_jobs.order_jobs.order_job import AggrOrderJob
-
-
-class AggrJobScheduler:
- def __init__(self, config):
- self.config = config
- self.jobs = self.instantiate_jobs()
-
- def run_jobs(self, start_date, end_date):
- for job in self.jobs:
- job.run(start_date=start_date, end_date=end_date)
-
- def instantiate_jobs(self):
- jobs = []
- for job_class in [AggrDisorderJob, AggrOrderJob]:
- job = job_class(
- config=self.config,
- )
- jobs.append(job)
- return jobs
diff --git a/indexer/aggr_jobs/disorder_jobs/daily_feature_holding_balance_uniswap_v3.sql b/indexer/aggr_jobs/disorder_jobs/daily_feature_holding_balance_uniswap_v3.sql
deleted file mode 100644
index 574f46c8e..000000000
--- a/indexer/aggr_jobs/disorder_jobs/daily_feature_holding_balance_uniswap_v3.sql
+++ /dev/null
@@ -1,33 +0,0 @@
-begin;
-delete
-from af_uniswap_v3_token_data_daily
-where block_date >= '{start_date}'
- and block_date < '{end_date}';
-insert into af_uniswap_v3_token_data_daily
-select position_token_address,
- TO_TIMESTAMP(block_timestamp)::DATE as block_date,
- token_id,
- wallet_address,
- pool_address,
- liquidity
-from (select *, row_number() over (partition by nft_address, token_id order by block_timestamp desc) rn
- from af_uniswap_v3_token_data_hist
- where TO_TIMESTAMP(block_timestamp) >= '{start_date}'
- and TO_TIMESTAMP(block_timestamp) < '{end_date}') t
-where rn = 1;
-
-
-delete
-from af_uniswap_v3_pool_prices_daily
-where block_date >= '{start_date}'
- and block_date < '{end_date}';
-insert into af_uniswap_v3_pool_prices_daily
-select pool_address,
- TO_TIMESTAMP(block_timestamp)::DATE as block_date,
- sqrt_price_x96
-from (select *, row_number() over (partition by pool_address order by block_timestamp desc) rn
- from af_uniswap_v3_pool_prices_hist
- where TO_TIMESTAMP(block_timestamp) >= '{start_date}'
- and TO_TIMESTAMP(block_timestamp) < '{end_date}') t
-where rn = 1;
-commit
\ No newline at end of file
diff --git a/indexer/aggr_jobs/disorder_jobs/daily_wallet_addresses_aggregates.sql b/indexer/aggr_jobs/disorder_jobs/daily_wallet_addresses_aggregates.sql
deleted file mode 100644
index 7dfb72b09..000000000
--- a/indexer/aggr_jobs/disorder_jobs/daily_wallet_addresses_aggregates.sql
+++ /dev/null
@@ -1,261 +0,0 @@
-Begin;
--- Handle outgoing transactions including errors
-WITH out_txn AS (SELECT from_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(DISTINCT hash) AS txn_out_cnt,
- SUM(value) AS txn_out_value,
- SUM(CASE WHEN receipt_status = 0 THEN 1 ELSE 0 END) AS txn_out_error_cnt
- FROM transactions
- WHERE from_address is not null
- and block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY from_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, txn_out_cnt, txn_out_value, txn_out_error_cnt)
-SELECT address,
- block_date,
- txn_out_cnt,
- txn_out_value,
- txn_out_error_cnt
-FROM out_txn
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET txn_out_cnt = EXCLUDED.txn_out_cnt,
- txn_out_value = EXCLUDED.txn_out_value,
- txn_out_error_cnt = EXCLUDED.txn_out_error_cnt;
-
--- Handle incoming transactions including errors
-WITH in_txn AS (SELECT to_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(DISTINCT hash) AS txn_in_cnt,
- SUM(value) AS txn_in_value,
- SUM(CASE WHEN receipt_status = 0 THEN 1 ELSE 0 END) AS txn_in_error_cnt
- FROM transactions
- WHERE to_address is not null
- and block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY to_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, txn_in_cnt, txn_in_value, txn_in_error_cnt)
-SELECT address,
- block_date,
- txn_in_cnt,
- txn_in_value,
- txn_in_error_cnt
-FROM in_txn
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET txn_in_cnt = EXCLUDED.txn_in_cnt,
- txn_in_value = EXCLUDED.txn_in_value,
- txn_in_error_cnt = EXCLUDED.txn_in_error_cnt;
-
-
--- Handle self transactions including errors
-WITH self_txn AS (SELECT from_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(DISTINCT hash) AS txn_self_cnt,
- SUM(CASE WHEN receipt_status = 0 THEN 1 ELSE 0 END) AS txn_self_error_cnt
- FROM transactions
- WHERE from_address = to_address
- and from_address is not null
- and block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY from_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, txn_self_cnt, txn_self_error_cnt)
-SELECT address,
- block_date,
- txn_self_cnt,
- txn_self_error_cnt
-FROM self_txn
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET txn_self_cnt = EXCLUDED.txn_self_cnt,
- txn_self_error_cnt = EXCLUDED.txn_self_error_cnt;
-
-WITH erc20_in AS (SELECT to_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(1) AS cnt
- FROM erc20_token_transfers
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY to_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, erc20_transfer_in_cnt)
-SELECT address,
- block_date,
- cnt
-FROM erc20_in
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET erc20_transfer_in_cnt = EXCLUDED.erc20_transfer_in_cnt;
-
-
-WITH erc20_out AS (SELECT from_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(1) AS cnt
- FROM erc20_token_transfers
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY from_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, erc20_transfer_out_cnt)
-SELECT address,
- block_date,
- cnt
-FROM erc20_out
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET erc20_transfer_out_cnt = EXCLUDED.erc20_transfer_out_cnt;
-
-
-WITH erc721_in AS (SELECT to_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(1) AS cnt
- FROM erc721_token_transfers
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY to_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, erc721_transfer_in_cnt)
-SELECT address,
- block_date,
- cnt
-FROM erc721_in
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET erc721_transfer_in_cnt = EXCLUDED.erc721_transfer_in_cnt;
-
-
-WITH erc721_out AS (SELECT from_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(1) AS cnt
- FROM erc721_token_transfers
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY from_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, erc721_transfer_out_cnt)
-SELECT address,
- block_date,
- cnt
-FROM erc721_out
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET erc721_transfer_out_cnt = EXCLUDED.erc721_transfer_out_cnt;
-
-WITH erc1155_in AS (SELECT to_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(1) AS cnt
- FROM erc1155_token_transfers
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY to_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, erc1155_transfer_in_cnt)
-SELECT address,
- block_date,
- cnt
-FROM erc1155_in
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET erc1155_transfer_in_cnt = EXCLUDED.erc1155_transfer_in_cnt;
-
-WITH erc1155_out AS (SELECT from_address AS address,
- DATE(block_timestamp) AS block_date,
- COUNT(1) AS cnt
- FROM erc1155_token_transfers
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- GROUP BY from_address, DATE(block_timestamp))
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, erc1155_transfer_out_cnt)
-SELECT address,
- block_date,
- cnt
-FROM erc1155_out
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET erc1155_transfer_out_cnt = EXCLUDED.erc1155_transfer_out_cnt;
-
-with contract_deployed_table as (select transaction_from_address as address,
- date(block_timestamp) as block_date,
- count(1) as contract_deployed_cnt
- from contracts
- WHERE block_timestamp >= '{start_date}'
- and block_timestamp < '{end_date}'
- group by 1, 2)
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, contract_deployed_cnt)
-SELECT address,
- block_date,
- contract_deployed_cnt
-FROM contract_deployed_table
-
-ON CONFLICT (address, block_date)
- DO UPDATE SET contract_deployed_cnt = EXCLUDED.contract_deployed_cnt;
-
---
-with contract_interacted_detail_table as (
-select date(d2.block_timestamp) as block_date, from_address, to_address, count(1) as contract_interacted_cnt
-from contracts d1
- inner join transactions d2
- on d1.address = d2.to_address
-WHERE d2.block_timestamp >= '{start_date}' and d2.block_timestamp < '{end_date}'
-group by 1, 2, 3
-)
-
-insert into daily_contract_interacted_aggregates(block_date, from_address, to_address, contract_interacted_cnt)
-select
- block_date, from_address, to_address, contract_interacted_cnt
-from contract_interacted_detail_table
-ON CONFLICT (block_date, from_address, to_address)
- DO UPDATE SET contract_interacted_cnt = EXCLUDED.contract_interacted_cnt;
- ;
-
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, from_address_unique_interacted_cnt)
-SELECT from_address as address,
- block_date,
- count(1) as from_address_unique_interacted_cnt
-FROM daily_contract_interacted_aggregates
-WHERE block_date >= '{start_date}' and block_date < '{end_date}'
-group by 1,2
-ON CONFLICT (address, block_date)
- DO UPDATE SET from_address_unique_interacted_cnt = EXCLUDED.from_address_unique_interacted_cnt;
-
-
-
-INSERT
-INTO daily_wallet_addresses_aggregates
- (address, block_date, to_address_unique_interacted_cnt)
-SELECT to_address as address,
- block_date,
- count(1) as to_address_unique_interacted_cnt
-FROM daily_contract_interacted_aggregates
-WHERE block_date >= '{start_date}' and block_date < '{end_date}'
-group by 1,2
-ON CONFLICT (address, block_date)
- DO UPDATE SET to_address_unique_interacted_cnt = EXCLUDED.to_address_unique_interacted_cnt;
-commit
\ No newline at end of file
diff --git a/indexer/aggr_jobs/disorder_jobs/disorder_job.py b/indexer/aggr_jobs/disorder_jobs/disorder_job.py
deleted file mode 100644
index a168dc5fd..000000000
--- a/indexer/aggr_jobs/disorder_jobs/disorder_job.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from sqlalchemy import text
-
-from indexer.aggr_jobs.aggr_base_job import AggrBaseJob
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-
-
-class AggrDisorderJob(AggrBaseJob):
- sql_folder = "disorder_jobs"
-
- def __init__(self, **kwargs):
- config = kwargs["config"]
- self.db_service = config["db_service"]
- self._batch_work_executor = BatchWorkExecutor(5, 5)
-
- def run(self, **kwargs):
- start_date = kwargs["start_date"]
- end_date = kwargs["end_date"]
-
- execute_sql_list = []
- date_pairs = self.generate_date_pairs(start_date, end_date)
- for date_pair in date_pairs:
- start_date, end_date = date_pair
- sql_content = self.get_sql_content("daily_wallet_addresses_aggregates", start_date, end_date)
- execute_sql_list.append(sql_content)
-
- self._batch_work_executor.execute(execute_sql_list, self.execute_sql, total_items=len(execute_sql_list))
- self._batch_work_executor.wait()
-
- def execute_sql(self, sql_contents):
- session = self.db_service.Session()
- for sql_content in sql_contents:
- session.execute(text(sql_content))
- session.commit()
diff --git a/indexer/aggr_jobs/disorder_jobs/models/daily_feature_uniswap_v3_pool_prices.py b/indexer/aggr_jobs/disorder_jobs/models/daily_feature_uniswap_v3_pool_prices.py
deleted file mode 100644
index 609edc1bd..000000000
--- a/indexer/aggr_jobs/disorder_jobs/models/daily_feature_uniswap_v3_pool_prices.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, NUMERIC, TIMESTAMP
-
-from common.models import HemeraModel
-
-
-class DailyFeatureUniswapV3PoolPrices(HemeraModel):
- __tablename__ = "af_uniswap_v3_pool_prices_daily"
-
- block_date = Column(DATE, primary_key=True, nullable=False)
- pool_address = Column(BYTEA, primary_key=True, nullable=False)
-
- sqrt_price_x96 = Column(NUMERIC(78))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (PrimaryKeyConstraint("block_date", "pool_address"),)
-
-
-# could be replaced by partition in case of huge amount data
-Index("af_uniswap_v3_pool_prices_daily_block_date_index", DailyFeatureUniswapV3PoolPrices.block_date)
diff --git a/indexer/aggr_jobs/disorder_jobs/models/daily_feature_uniswap_v3_token_details.py b/indexer/aggr_jobs/disorder_jobs/models/daily_feature_uniswap_v3_token_details.py
deleted file mode 100644
index 18e62fa64..000000000
--- a/indexer/aggr_jobs/disorder_jobs/models/daily_feature_uniswap_v3_token_details.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, INTEGER, NUMERIC, TIMESTAMP
-
-from common.models import HemeraModel
-
-
-class DailyFeatureUniswapV3TokenDeatils(HemeraModel):
- __tablename__ = "af_uniswap_v3_token_data_daily"
-
- block_date = Column(DATE, primary_key=True, nullable=False)
- position_token_address = Column(BYTEA, primary_key=True, nullable=False)
- token_id = Column(INTEGER, primary_key=True, nullable=False)
- wallet_address = Column(BYTEA, nullable=False)
- pool_address = Column(BYTEA, nullable=False)
- liquidity = Column(NUMERIC(78))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (PrimaryKeyConstraint("block_date", "position_token_address", "token_id"),)
-
-
-# could be replaced by partition in case of huge amount data
-Index("af_uniswap_v3_token_data_daily_index", DailyFeatureUniswapV3TokenDeatils.block_date)
diff --git a/indexer/aggr_jobs/order_jobs/models/period_feature_holding_balance_merchantmoe.py b/indexer/aggr_jobs/order_jobs/models/period_feature_holding_balance_merchantmoe.py
deleted file mode 100644
index 97a6d791b..000000000
--- a/indexer/aggr_jobs/order_jobs/models/period_feature_holding_balance_merchantmoe.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, NUMERIC, TIMESTAMP, VARCHAR
-
-from common.models import HemeraModel
-
-
-class PeriodFeatureHoldingBalanceMerchantmoe(HemeraModel):
- __tablename__ = "af_holding_balance_merchantmoe_period"
-
- period_date = Column(DATE, primary_key=True, nullable=False)
- protocol_id = Column(VARCHAR, primary_key=True, nullable=False)
- position_token_address = Column(BYTEA, primary_key=True, nullable=False)
- token_id = Column(NUMERIC, primary_key=True, nullable=False)
- wallet_address = Column(BYTEA, primary_key=True, nullable=False)
-
- token0_address = Column(BYTEA, nullable=False)
- token0_symbol = Column(VARCHAR, nullable=False)
- token0_balance = Column(NUMERIC(100, 18))
-
- token1_address = Column(BYTEA, nullable=False)
- token1_symbol = Column(VARCHAR, nullable=False)
- token1_balance = Column(NUMERIC(100, 18))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (
- PrimaryKeyConstraint("period_date", "protocol_id", "position_token_address", "token_id", "wallet_address"),
- )
-
-
-Index("af_holding_balance_merchantmoe_period_period_date", PeriodFeatureHoldingBalanceMerchantmoe.period_date)
diff --git a/indexer/aggr_jobs/order_jobs/models/period_feature_holding_balance_uniswap_v3.py b/indexer/aggr_jobs/order_jobs/models/period_feature_holding_balance_uniswap_v3.py
deleted file mode 100644
index 75a61cd23..000000000
--- a/indexer/aggr_jobs/order_jobs/models/period_feature_holding_balance_uniswap_v3.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, INTEGER, NUMERIC, TIMESTAMP, VARCHAR
-
-from common.models import HemeraModel
-
-
-class PeriodFeatureHoldingBalanceUniswapV3(HemeraModel):
- __tablename__ = "af_holding_balance_uniswap_v3_period"
-
- period_date = Column(DATE, primary_key=True, nullable=False)
- protocol_id = Column(VARCHAR, primary_key=True, nullable=False)
- pool_address = Column(BYTEA, primary_key=True, nullable=False)
- token_id = Column(INTEGER, primary_key=True, nullable=False)
- wallet_address = Column(BYTEA, nullable=False)
- token0_address = Column(BYTEA, nullable=False)
- token0_symbol = Column(VARCHAR, nullable=False)
- token0_balance = Column(NUMERIC(100, 18))
-
- token1_address = Column(BYTEA, nullable=False)
- token1_symbol = Column(VARCHAR, nullable=False)
- token1_balance = Column(NUMERIC(100, 18))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (PrimaryKeyConstraint("period_date", "protocol_id", "pool_address", "token_id"),)
-
-
-# could be replaced by partition in case of huge amount data
-Index("af_holding_balance_uniswap_v3_period_period_date", PeriodFeatureHoldingBalanceUniswapV3.period_date)
diff --git a/indexer/aggr_jobs/order_jobs/models/period_feature_merchant_moe_token_bin.py b/indexer/aggr_jobs/order_jobs/models/period_feature_merchant_moe_token_bin.py
deleted file mode 100644
index 94b313ebf..000000000
--- a/indexer/aggr_jobs/order_jobs/models/period_feature_merchant_moe_token_bin.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from sqlalchemy import Column, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, NUMERIC, TIMESTAMP
-
-from common.models import HemeraModel
-
-
-class PeriodFeatureMerchantMoeTokenBinRecords(HemeraModel):
- __tablename__ = "af_merchant_moe_token_bin_hist_period"
-
- period_date = Column(DATE, primary_key=True)
- position_token_address = Column(BYTEA, primary_key=True)
- token_id = Column(NUMERIC(100), primary_key=True)
-
- reserve0_bin = Column(NUMERIC(100))
- reserve1_bin = Column(NUMERIC(100))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (PrimaryKeyConstraint("period_date", "position_token_address", "token_id"),)
diff --git a/indexer/aggr_jobs/order_jobs/models/period_feature_uniswap_v3_pool_prices.py b/indexer/aggr_jobs/order_jobs/models/period_feature_uniswap_v3_pool_prices.py
deleted file mode 100644
index be5f423fd..000000000
--- a/indexer/aggr_jobs/order_jobs/models/period_feature_uniswap_v3_pool_prices.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, NUMERIC, TIMESTAMP
-
-from common.models import HemeraModel
-
-
-class PeriodFeatureUniswapV3PoolPrices(HemeraModel):
- __tablename__ = "af_uniswap_v3_pool_prices_period"
-
- period_date = Column(DATE, primary_key=True, nullable=False)
- pool_address = Column(BYTEA, primary_key=True, nullable=False)
- sqrt_price_x96 = Column(NUMERIC(78))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (PrimaryKeyConstraint("period_date", "pool_address"),)
-
-
-# could be replaced by partition in case of huge amount data
-Index("af_uniswap_v3_pool_prices_period_period_date_index", PeriodFeatureUniswapV3PoolPrices.period_date)
diff --git a/indexer/aggr_jobs/order_jobs/models/period_feature_uniswap_v3_token_details.py b/indexer/aggr_jobs/order_jobs/models/period_feature_uniswap_v3_token_details.py
deleted file mode 100644
index 114408d1b..000000000
--- a/indexer/aggr_jobs/order_jobs/models/period_feature_uniswap_v3_token_details.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from sqlalchemy import Column, Index, PrimaryKeyConstraint, func
-from sqlalchemy.dialects.postgresql import BYTEA, DATE, INTEGER, NUMERIC, TIMESTAMP
-
-from common.models import HemeraModel
-
-
-class PeriodFeatureUniswapV3TokenDeatils(HemeraModel):
- __tablename__ = "af_uniswap_v3_token_data_period"
-
- period_date = Column(DATE, primary_key=True, nullable=False)
- position_token_address = Column(BYTEA, primary_key=True, nullable=False)
- token_id = Column(INTEGER, primary_key=True, nullable=False)
- wallet_address = Column(BYTEA, nullable=False)
- pool_address = Column(BYTEA, nullable=False)
- liquidity = Column(NUMERIC(78))
-
- create_time = Column(TIMESTAMP, server_default=func.now())
-
- __table_args__ = (PrimaryKeyConstraint("period_date", "position_token_address", "token_id"),)
-
-
-# could be replaced by partition in case of huge amount data
-Index("af_uniswap_v3_token_data_period_date_index", PeriodFeatureUniswapV3TokenDeatils.period_date)
diff --git a/indexer/aggr_jobs/order_jobs/order_job.py b/indexer/aggr_jobs/order_jobs/order_job.py
deleted file mode 100644
index ea9449346..000000000
--- a/indexer/aggr_jobs/order_jobs/order_job.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from sqlalchemy import text
-
-from indexer.aggr_jobs.aggr_base_job import AggrBaseJob
-
-
-class AggrOrderJob(AggrBaseJob):
- sql_folder = "order_jobs"
-
- def __init__(self, **kwargs):
- config = kwargs["config"]
- self.db_service = config["db_service"]
-
- def run(self, **kwargs):
- start_date = kwargs["start_date"]
- end_date = kwargs["end_date"]
-
- session = self.db_service.Session()
-
- date_pairs = self.generate_date_pairs(start_date, end_date)
- for date_pair in date_pairs:
- start_date, end_date = date_pair
- sql_content = self.get_sql_content("period_wallet_addresses_aggregates", start_date, end_date)
- session.execute(text(sql_content))
- session.commit()
diff --git a/indexer/aggr_jobs/order_jobs/period_feature_holding_balance_merchantmoe.sql b/indexer/aggr_jobs/order_jobs/period_feature_holding_balance_merchantmoe.sql
deleted file mode 100644
index 5f960bd29..000000000
--- a/indexer/aggr_jobs/order_jobs/period_feature_holding_balance_merchantmoe.sql
+++ /dev/null
@@ -1,84 +0,0 @@
-delete
-from af_merchant_moe_token_bin_hist_period
-where period_date >= '{start_date}'
- and period_date < '{end_date}';
-
-insert into af_merchant_moe_token_bin_hist_period(period_date, position_token_address, token_id, reserve0_bin, reserve1_bin)
-select date('{start_date}'), position_token_address, token_id, reserve0_bin, reserve1_bin
-from (select *, row_number() over (partition by position_token_address, token_id order by block_timestamp desc) as rn
- from feature_merchant_moe_token_bin_records
- where to_timestamp(block_timestamp) <= '{start_date}') t
-where rn = 1
-;
-
-
-delete
-from af_holding_balance_merchantmoe_period
-where period_date >= '{start_date}'
- and period_date < '{end_date}';
-insert
-into af_holding_balance_merchantmoe_period(period_date, protocol_id, position_token_address, token_id,
- wallet_address, token0_address, token0_symbol, token0_balance,
- token1_address, token1_symbol, token1_balance)
-with moe_pools_table as (select d0.*,
-
- d4.symbol as token0_symbol,
- d4.decimals as token0_decimals,
- d5.symbol as token1_symbol,
- d5.decimals as token1_decimals
- from af_merchant_moe_pools d0
- inner join tokens d4 on d0.token0_address = d4.address
- inner join tokens d5 on d0.token1_address = d5.address
- where d4.symbol = 'FBTC'
- or d5.symbol = 'FBTC'),
-
- moe_pool_with_records_table as (select d0.*, d1.address, d1.token_id, d1.balance
- from moe_pools_table d0
- inner join
- (select *
- from period_address_token_balances
- where token_type = 'ERC1155') d1
- on d0.token_address = d1.token_address),
-
- detail_table as (select d1.address
- , d1.token_address
- , d1.token_id
- , d1.balance
- , d2.total_supply
- , d3.reserve0_bin
- , d3.reserve1_bin
- , token0_address
- , token0_symbol
- , token0_decimals
- , token1_address
- , token1_symbol
- , token1_decimals
- from moe_pool_with_records_table d1
- inner join
- (select *
- from period_feature_erc1155_token_supply_records -- todo: replace with the new name table
- where period_date = '{start_date}') d2
- on d1.token_address = d2.token_address and d1.token_id = d2.token_id
- inner join (select *
- from af_merchant_moe_token_bin_hist_period
- where period_date = '{start_date}') d3
- on d1.token_address = d3.position_token_address and d1.token_id = d3.token_id)
-
-select date('{start_date}'),
- 'merchantmoe' as protocol_id,
- token_address as position_token_address,
- token_id,
- address,
- token0_address,
- token0_symbol,
- case
- when total_supply > 0 then (balance / total_supply) * reserve0_bin / pow(10, token0_decimals)
- else 0 end as token0_balance,
- token1_address,
- token1_symbol,
- case
- when total_supply > 0 then (balance / total_supply) * reserve1_bin / pow(10, token1_decimals)
- else 0 end as token1_balance
-from detail_table
-;
-
diff --git a/indexer/aggr_jobs/order_jobs/period_feature_holding_balance_uniswap_v3.sql b/indexer/aggr_jobs/order_jobs/period_feature_holding_balance_uniswap_v3.sql
deleted file mode 100644
index 6a353f303..000000000
--- a/indexer/aggr_jobs/order_jobs/period_feature_holding_balance_uniswap_v3.sql
+++ /dev/null
@@ -1,157 +0,0 @@
-delete
-from af_uniswap_v3_token_data_period
-where period_date >= '{start_date}'
- and period_date < '{end_date}';
-
-with today_table as (select *
- from af_uniswap_v3_token_data_daily
- where block_date = '{start_date}'),
- yesterday_table as (select *
- from af_uniswap_v3_token_data_period
- where period_date = '{start_date_previous}')
-
-insert
-into af_uniswap_v3_token_data_period
-select COALESCE(s1.position_token_address, s2.position_token_address) AS position_token_address,
- date('{start_date}') AS period_date,
- COALESCE(s1.token_id, s2.token_id) AS token_id,
- COALESCE(s1.wallet_address, s2.wallet_address) AS wallet_address,
- COALESCE(s1.pool_address, s2.pool_address) AS pool_address,
- COALESCE(s1.liquidity, s2.liquidity, 0) AS liquidity
-from today_table s1
- full join
- yesterday_table s2
- on s1.position_token_address = s2.position_token_address and s1.token_id = s2.token_id;
-
-delete
-from af_uniswap_v3_pool_prices_period
-where period_date >= '{start_date}'
- and period_date < '{end_date}';
-
-with today_table as (select *
- from af_uniswap_v3_pool_prices_daily
- where block_date = '{start_date}'),
- yesterday_table as (select *
- from af_uniswap_v3_pool_prices_period
- where period_date = '{start_date_previous}')
-
-insert
-into af_uniswap_v3_pool_prices_period
-select COALESCE(s1.pool_address, s2.pool_address) AS pool_address,
- date('{start_date}') AS period_date,
- COALESCE(s1.sqrt_price_x96, s2.sqrt_price_x96, 0) AS liquidity
-from today_table s1
- full join
- yesterday_table s2
- on s1.pool_address = s2.pool_address;
-
-delete
-from af_holding_balance_uniswap_v3_period
-where period_date >= '{start_date}'
- and period_date < '{end_date}';
-with period_token_price as (select symbol, price
- from (select symbol,
- price,
- row_number() over (partition by symbol order by timestamp desc) rn
- from token_price
- where timestamp < '{end_date}') t
- where rn = 1),
- tokens_table as (select d1.address, d1.decimals, d1.symbol, d2.price
- from tokens d1
- left join
- period_token_price d2 on d1.symbol = d2.symbol
- where d1.symbol is not null),
- detail_table as (SELECT d1.period_date,
- d1.wallet_address,
- d1.position_token_address,
- d1.liquidity,
- d1.pool_address,
- d1.token_id,
- d2.sqrt_price_x96,
- d3.tick_lower,
- d3.tick_upper,
- d4.token0_address,
- d4.token1_address,
- d5.decimals as toekn0_decimals,
- d5.symbol as token0_symbol,
- d5.price as token0_price,
- d6.decimals as toekn1_decimals,
- d6.symbol as token1_symbol,
- d6.price as token1_price,
- sqrt(EXP(tick_lower * LN(1.0001))) as sqrt_ratio_a,
- sqrt(EXP(tick_upper * LN(1.0001))) as sqrt_ratio_b,
- FLOOR(LOG((sqrt_price_x96 / pow(2, 96)) ^ 2) / LOG(1.0001)) AS current_tick,
- sqrt_price_x96 / pow(2, 96) as sqrt_price
-
- FROM af_uniswap_v3_token_data_period d1
- inner join af_uniswap_v3_pool_prices_period d2 on
- d1.pool_address = d2.pool_address
- inner join af_uniswap_v3_tokens d3
- on d1.position_token_address = d3.position_token_address
- and d1.token_id = d3.token_id
- inner join af_uniswap_v3_pools d4
- on d1.pool_address = d4.pool_address
- inner join tokens_table d5
- on d4.token0_address = d5.address
- inner join tokens_table d6
- on d4.token1_address = d6.address
- where d1.period_date = '{start_date}'
- and d2.period_date = '{start_date}'),
- tick_table as (select period_date,
- wallet_address,
- position_token_address,
- token_id,
- token0_address,
- token0_symbol,
- token1_symbol,
- token1_address,
-                        token0_decimals,
-                        token1_decimals,
- token0_price,
- token1_price,
- liquidity,
- tick_lower,
- tick_upper,
- case
- when current_tick <= tick_lower then
- FLOOR(liquidity * ((sqrt_ratio_b - sqrt_ratio_a) / (sqrt_ratio_a * sqrt_ratio_b)))
- when current_tick > tick_lower and current_tick < tick_upper then
- FLOOR(liquidity * ((sqrt_ratio_b - sqrt_price) / (sqrt_price * sqrt_ratio_b)))
- else 0
-                               end / pow(10, token0_decimals) AS token0_balance,
- case
- when current_tick >= tick_upper then floor(liquidity * (sqrt_ratio_b - sqrt_ratio_a))
- when current_tick > tick_lower and current_tick < tick_upper then
- floor(liquidity * (sqrt_price - sqrt_ratio_a))
-                               else 0 end / pow(10, token1_decimals) AS token1_balance
- from detail_table)
-insert
-into af_holding_balance_uniswap_v3_period(protocol_id,
- position_token_address,
- period_date,
- token_id,
- wallet_address,
- token0_address,
- token0_symbol,
- token0_balance,
- token1_address,
- token1_symbol,
- token1_balance)
-select case
- when position_token_address = '\x218bf598d1453383e2f4aa7b14ffb9bfb102d637'
- then 'agni'
- when position_token_address = '\xaaa78e8c4241990b4ce159e105da08129345946a' then 'cleoexchange'
- when position_token_address = '\xc36442b4a4522e871399cd717abdd847ab11fe88' then 'uniswap_v3'
-           else 'uniswap_v3' end as protocol_id,
-
- position_token_address,
- period_date,
- token_id,
- wallet_address,
- token0_address,
- token0_symbol,
- token0_balance,
- token1_address,
- token1_symbol,
- token1_balance
-from tick_table;
\ No newline at end of file
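
Note on the tick arithmetic in the deleted SQL above: the CASE expressions implement the standard Uniswap V3 conversion from a position's liquidity and tick bounds into token amounts. The following is a minimal Python sketch of the same math, using illustrative names rather than any Hemera API:

# Illustrative re-implementation of the CASE expressions in the SQL above.
# Inputs match the SQL: raw liquidity, the pool's sqrt_price_x96, and the
# position's tick bounds; decimals are the ERC-20 decimals of each token.
import math

def position_token_amounts(liquidity, sqrt_price_x96, tick_lower, tick_upper,
                           token0_decimals, token1_decimals):
    sqrt_ratio_a = math.sqrt(1.0001 ** tick_lower)   # sqrt price at lower bound
    sqrt_ratio_b = math.sqrt(1.0001 ** tick_upper)   # sqrt price at upper bound
    sqrt_price = sqrt_price_x96 / 2 ** 96            # current sqrt price
    current_tick = math.floor(math.log(sqrt_price ** 2, 1.0001))

    if current_tick <= tick_lower:                   # below range: all token0
        amount0 = liquidity * (sqrt_ratio_b - sqrt_ratio_a) / (sqrt_ratio_a * sqrt_ratio_b)
        amount1 = 0
    elif current_tick >= tick_upper:                 # above range: all token1
        amount0 = 0
        amount1 = liquidity * (sqrt_ratio_b - sqrt_ratio_a)
    else:                                            # in range: split across both
        amount0 = liquidity * (sqrt_ratio_b - sqrt_price) / (sqrt_price * sqrt_ratio_b)
        amount1 = liquidity * (sqrt_price - sqrt_ratio_a)

    # Floor before scaling by decimals, exactly as the SQL does.
    return (math.floor(amount0) / 10 ** token0_decimals,
            math.floor(amount1) / 10 ** token1_decimals)
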
diff --git a/indexer/aggr_jobs/order_jobs/period_wallet_addresses_aggregates.sql b/indexer/aggr_jobs/order_jobs/period_wallet_addresses_aggregates.sql
deleted file mode 100644
index 6d2a1f45f..000000000
--- a/indexer/aggr_jobs/order_jobs/period_wallet_addresses_aggregates.sql
+++ /dev/null
@@ -1,82 +0,0 @@
-begin;
-delete
-from period_wallet_addresses_aggregates
-where period_date = '{end_date}';
-insert into period_wallet_addresses_aggregates(address, period_date, txn_in_cnt, txn_out_cnt, txn_in_value,
- txn_out_value, internal_txn_in_cnt, internal_txn_out_cnt,
- internal_txn_in_value, internal_txn_out_value, erc20_transfer_in_cnt,
- erc721_transfer_in_cnt, erc1155_transfer_in_cnt, erc20_transfer_out_cnt,
- erc721_transfer_out_cnt, erc1155_transfer_out_cnt, txn_self_cnt,
- txn_in_error_cnt, txn_out_error_cnt, txn_self_error_cnt, deposit_cnt,
- withdraw_cnt, gas_in_used, l2_txn_in_fee, l1_txn_in_fee, txn_in_fee,
- gas_out_used, l2_txn_out_fee, l1_txn_out_fee, txn_out_fee,
- contract_deployed_cnt, from_address_unique_interacted_cnt,
- to_address_unique_interacted_cnt)
-WITH today_table AS (SELECT *
- FROM daily_wallet_addresses_aggregates
- WHERE block_date = '{start_date}'),
- yesterday_table AS (SELECT *
- FROM period_wallet_addresses_aggregates
- WHERE period_date = '{end_date}'),
- from_address_unique_interacted_cnt_table as (SELECT from_address as address,
- count(distinct to_address) as from_address_unique_interacted_cnt
- FROM daily_contract_interacted_aggregates
- group by 1),
- to_address_unique_interacted_cnt_table as (SELECT to_address as address,
- count(distinct from_address) as to_address_unique_interacted_cnt
- FROM daily_contract_interacted_aggregates
- group by 1)
-
-SELECT COALESCE(s1.address, s2.address) AS address,
-       date('{end_date}')                                       AS period_date,
- COALESCE(s1.txn_in_cnt, 0) + COALESCE(s2.txn_in_cnt, 0) AS txn_in_cnt,
- COALESCE(s1.txn_out_cnt, 0) + COALESCE(s2.txn_out_cnt, 0) AS txn_out_cnt,
- COALESCE(s1.txn_in_value, 0) + COALESCE(s2.txn_in_value, 0) AS txn_in_value,
- COALESCE(s1.txn_out_value, 0) + COALESCE(s2.txn_out_value, 0) AS txn_out_value,
- COALESCE(s1.internal_txn_in_cnt, 0) +
- COALESCE(s2.internal_txn_in_cnt, 0) AS internal_txn_in_cnt,
- COALESCE(s1.internal_txn_out_cnt, 0) +
- COALESCE(s2.internal_txn_out_cnt, 0) AS internal_txn_out_cnt,
- COALESCE(s1.internal_txn_in_value, 0) +
- COALESCE(s2.internal_txn_in_value, 0) AS internal_txn_in_value,
- COALESCE(s1.internal_txn_out_value, 0) +
- COALESCE(s2.internal_txn_out_value, 0) AS internal_txn_out_value,
- COALESCE(s1.erc20_transfer_in_cnt, 0) +
- COALESCE(s2.erc20_transfer_in_cnt, 0) AS erc20_transfer_in_cnt,
- COALESCE(s1.erc721_transfer_in_cnt, 0) +
- COALESCE(s2.erc721_transfer_in_cnt, 0) AS erc721_transfer_in_cnt,
- COALESCE(s1.erc1155_transfer_in_cnt, 0) +
- COALESCE(s2.erc1155_transfer_in_cnt, 0) AS erc1155_transfer_in_cnt,
- COALESCE(s1.erc20_transfer_out_cnt, 0) +
- COALESCE(s2.erc20_transfer_out_cnt, 0) AS erc20_transfer_out_cnt,
- COALESCE(s1.erc721_transfer_out_cnt, 0) +
- COALESCE(s2.erc721_transfer_out_cnt, 0) AS erc721_transfer_out_cnt,
- COALESCE(s1.erc1155_transfer_out_cnt, 0) +
- COALESCE(s2.erc1155_transfer_out_cnt, 0) AS erc1155_transfer_out_cnt,
- COALESCE(s1.txn_self_cnt, 0) + COALESCE(s2.txn_self_cnt, 0) AS txn_self_cnt,
- COALESCE(s1.txn_in_error_cnt, 0) + COALESCE(s2.txn_in_error_cnt, 0) AS txn_in_error_cnt,
- COALESCE(s1.txn_out_error_cnt, 0) +
- COALESCE(s2.txn_out_error_cnt, 0) AS txn_out_error_cnt,
- COALESCE(s1.txn_self_error_cnt, 0) +
- COALESCE(s2.txn_self_error_cnt, 0) AS txn_self_error_cnt,
- COALESCE(s1.deposit_cnt, 0) + COALESCE(s2.deposit_cnt, 0) AS deposit_cnt,
- COALESCE(s1.withdraw_cnt, 0) + COALESCE(s2.withdraw_cnt, 0) AS withdraw_cnt,
- COALESCE(s1.gas_in_used, 0) + COALESCE(s2.gas_in_used, 0) AS gas_in_used,
- COALESCE(s1.l2_txn_in_fee, 0) + COALESCE(s2.l2_txn_in_fee, 0) AS l2_txn_in_fee,
- COALESCE(s1.l1_txn_in_fee, 0) + COALESCE(s2.l1_txn_in_fee, 0) AS l1_txn_in_fee,
- COALESCE(s1.txn_in_fee, 0) + COALESCE(s2.txn_in_fee, 0) AS txn_in_fee,
- COALESCE(s1.gas_out_used, 0) + COALESCE(s2.gas_out_used, 0) AS gas_out_used,
- COALESCE(s1.l2_txn_out_fee, 0) + COALESCE(s2.l2_txn_out_fee, 0) AS l2_txn_out_fee,
- COALESCE(s1.l1_txn_out_fee, 0) + COALESCE(s2.l1_txn_out_fee, 0) AS l1_txn_out_fee,
- COALESCE(s1.txn_out_fee, 0) + COALESCE(s2.txn_out_fee, 0) AS txn_out_fee,
- COALESCE(s1.contract_deployed_cnt, 0) +
- COALESCE(s2.contract_deployed_cnt, 0) AS contract_deployed_cnt,
- COALESCE(s1.from_address_unique_interacted_cnt, 0) +
- COALESCE(s2.from_address_unique_interacted_cnt, 0) AS from_address_unique_interacted_cnt,
- COALESCE(s1.to_address_unique_interacted_cnt, 0) +
- COALESCE(s2.to_address_unique_interacted_cnt, 0) AS to_address_unique_interacted_cnt
-FROM today_table s1
- FULL JOIN yesterday_table s2 ON s1.address = s2.address
- left join from_address_unique_interacted_cnt_table s3 on coalesce(s1.address, s2.address) = s3.address
- left join to_address_unique_interacted_cnt_table s4 on coalesce(s1.address, s2.address) = s4.address;
-commit
\ No newline at end of file
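
The statement above carries cumulative per-wallet counters forward by FULL JOINing the day's snapshot against the prior period row and summing COALESCEd values, so an address missing on either side contributes zero. A minimal sketch of the same merge in Python (field names are illustrative):

# Minimal sketch of the FULL JOIN + COALESCE accumulation pattern used above:
# merge a daily snapshot into the running period totals, treating missing
# rows on either side as zeros.
def merge_daily_into_period(daily: dict, period: dict) -> dict:
    merged = {}
    for address in daily.keys() | period.keys():     # emulates FULL OUTER JOIN
        d = daily.get(address, {})
        p = period.get(address, {})
        merged[address] = {
            key: d.get(key, 0) + p.get(key, 0)       # COALESCE(x, 0) + COALESCE(y, 0)
            for key in d.keys() | p.keys()
        }
    return merged
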
diff --git a/indexer/aggr_jobs/utils.py b/indexer/aggr_jobs/utils.py
deleted file mode 100644
index 8a6302909..000000000
--- a/indexer/aggr_jobs/utils.py
+++ /dev/null
@@ -1,79 +0,0 @@
-from datetime import datetime, timedelta
-
-import click
-from web3 import Web3
-
-from common.models.sync_record import SyncRecord
-
-
-def get_yesterday_date():
- now = datetime.now()
-
- yesterday_datetime = now - timedelta(days=1)
-
- today_str = now.strftime("%Y-%m-%d")
- yesterday_str = yesterday_datetime.strftime("%Y-%m-%d")
-
- return today_str, yesterday_str
-
-
-class DateType(click.ParamType):
- name = "date"
-
- def convert(self, value, param, ctx):
- try:
- if value is not None:
- datetime.strptime(value, "%Y-%m-%d")
- return value
- except ValueError:
- self.fail(f"{value} is not a valid date in YYYY-MM-DD format", param, ctx)
-
-
-def check_data_completeness(db_service, provider_uri, end_date):
- record = read_sync_record(db_service)
- if not record:
-        raise click.ClickException("Unable to read the sync record; cannot verify data completeness")
- web_ = Web3(Web3.HTTPProvider(provider_uri))
-    task_end_ts = convert_date_to_timestamp(end_date)
- block = web_.eth.get_block(record)
- block_timestamp = block.timestamp
- if block_timestamp < task_end_ts:
- dt_object = datetime.fromtimestamp(block_timestamp)
- raise click.ClickException(
- f"Incomplete data detected. The latest available data is from {dt_object}, but the provided end_date is {end_date}."
- )
-
-
-def convert_date_to_timestamp(date_string):
- dt_object = datetime.strptime(date_string, "%Y-%m-%d")
- return int(dt_object.timestamp())
-
-
-def read_sync_record_from_file():
- try:
- with open("sync_record", "r") as file:
- sync_record = file.read().strip()
- return int(sync_record)
- except FileNotFoundError:
- print("sync_record file not found.")
- return None
- except ValueError:
- print("sync_record does not contain a valid number.")
- return None
-
-
-def read_sync_record_from_pg(db_service):
- try:
- session = db_service.Session()
- latest_record = session.query(SyncRecord).first()
- record = latest_record.last_block_number
- return record
- except Exception:
- return None
-
-
-def read_sync_record(db_service) -> int:
- record = read_sync_record_from_file()
- if not record:
- record = read_sync_record_from_pg(db_service)
- return record
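
For context, DateType above is a click parameter type; a hypothetical command wiring it up would look like the following (option and command names are illustrative, not part of the deleted module):

# Hypothetical CLI wiring for the DateType parameter type defined above.
import click

@click.command()
@click.option("--start-date", type=DateType(), required=True, help="YYYY-MM-DD")
@click.option("--end-date", type=DateType(), required=True, help="YYYY-MM-DD")
def aggregate(start_date, end_date):
    """Both options are validated as YYYY-MM-DD before the body runs."""
    click.echo(f"aggregating from {start_date} to {end_date}")
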
diff --git a/indexer/controller/aggregates_controller.py b/indexer/controller/aggregates_controller.py
deleted file mode 100644
index 714c0b176..000000000
--- a/indexer/controller/aggregates_controller.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from datetime import datetime, timedelta
-
-from indexer.controller.base_controller import BaseController
-
-
-class AggregatesController(BaseController):
- def __init__(self, job_dispatcher):
- self.job_dispatcher = job_dispatcher
-
- def action(self, start_date, end_date, date_batch_size):
- date_batches = self.split_date_range(start_date, end_date, date_batch_size)
-        for date_batch in date_batches:
-            batch_start, batch_end = date_batch
-            self.job_dispatcher.run(batch_start, batch_end)
-
- @staticmethod
- def split_date_range(start_date, end_date, batch_size):
- start_date_obj = datetime.strptime(start_date, "%Y-%m-%d")
- end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
-
- date_ranges = []
- while start_date_obj < end_date_obj:
- batch_end_date = min(start_date_obj + timedelta(days=batch_size - 1), end_date_obj)
- date_ranges.append((start_date_obj.strftime("%Y-%m-%d"), batch_end_date.strftime("%Y-%m-%d")))
- start_date_obj = batch_end_date + timedelta(days=1)
-
- return date_ranges
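
A worked example of split_date_range as defined above; note the strict < in the loop, so end_date never starts a batch of its own:

# Behavior of AggregatesController.split_date_range as defined above.
assert AggregatesController.split_date_range("2024-07-01", "2024-07-05", 2) == [
    ("2024-07-01", "2024-07-02"),
    ("2024-07-03", "2024-07-04"),
]
# "2024-07-05" is only covered when it falls inside a batch, e.g. batch_size=3:
assert AggregatesController.split_date_range("2024-07-01", "2024-07-05", 3) == [
    ("2024-07-01", "2024-07-03"),
    ("2024-07-04", "2024-07-05"),
]
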
diff --git a/indexer/controller/dispatcher/aggregates_dispatcher.py b/indexer/controller/dispatcher/aggregates_dispatcher.py
deleted file mode 100644
index 975605814..000000000
--- a/indexer/controller/dispatcher/aggregates_dispatcher.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from indexer.aggr_jobs.aggr_job_scheduler import AggrJobScheduler
-from indexer.controller.dispatcher.base_dispatcher import BaseDispatcher
-
-
-class AggregatesDispatcher(BaseDispatcher):
- def __init__(self, config):
- super().__init__()
- self._job_scheduler = AggrJobScheduler(config=config)
-
- def run(self, start_date, end_date):
- self._job_scheduler.run_jobs(start_date=start_date, end_date=end_date)
diff --git a/indexer/domain/contract.py b/indexer/domain/contract.py
deleted file mode 100644
index 76c3b2f35..000000000
--- a/indexer/domain/contract.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class Contract(Domain):
- address: str
- name: str
- contract_creator: str
- creation_code: str
- deployed_code: str
- block_number: int
- block_hash: str
- block_timestamp: int
- transaction_index: int
- transaction_hash: str
- transaction_from_address: str
-
- def __init__(self, contract: dict):
- self.dict_to_entity(contract)
-
- def fill_transaction_from_address(self, address: str):
- self.transaction_from_address = address
-
-
-def extract_contract_from_trace(trace):
- contract = {
- "address": trace.to_address,
- "contract_creator": trace.from_address,
- "creation_code": trace.input,
- "deployed_code": trace.output,
- "block_number": trace.block_number,
- "block_hash": trace.block_hash,
- "block_timestamp": trace.block_timestamp,
- "transaction_index": trace.transaction_index,
- "transaction_hash": trace.transaction_hash,
- }
-
- return contract
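
A hypothetical end-to-end use of the helpers above, with a stand-in trace object; the real pipeline passes indexer trace domain objects, and dict_to_entity is assumed to tolerate fields such as name being absent:

# Sketch only: feed a stand-in trace through extract_contract_from_trace.
from types import SimpleNamespace

trace = SimpleNamespace(
    to_address="0xNewContract", from_address="0xDeployer",
    input="0x6080...", output="0x6080...",
    block_number=1, block_hash="0xabc", block_timestamp=1700000000,
    transaction_index=0, transaction_hash="0xdef",
)
contract = Contract(extract_contract_from_trace(trace))
contract.fill_transaction_from_address("0xSenderOfTx")
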
diff --git a/indexer/jobs/__init__.py b/indexer/jobs/__init__.py
deleted file mode 100644
index e47ba9893..000000000
--- a/indexer/jobs/__init__.py
+++ /dev/null
@@ -1,25 +0,0 @@
-__all__ = [
- "CSVSourceJob",
- "PGSourceJob",
- "ExportBlocksJob",
- "ExportTransactionsAndLogsJob",
- "ExportTokensAndTransfersJob",
- "ExportTokenIdInfosJob",
- "ExportTokenBalancesJob",
- "ExportTracesJob",
- "ExportContractsJob",
- "ExportCoinBalancesJob",
- "FilterTransactionDataJob",
-]
-
-from indexer.jobs.base_job import FilterTransactionDataJob
-from indexer.jobs.export_blocks_job import ExportBlocksJob
-from indexer.jobs.export_coin_balances_job import ExportCoinBalancesJob
-from indexer.jobs.export_contracts_job import ExportContractsJob
-from indexer.jobs.export_token_balances_job import ExportTokenBalancesJob
-from indexer.jobs.export_token_id_infos_job import ExportTokenIdInfosJob
-from indexer.jobs.export_tokens_and_transfers_job import ExportTokensAndTransfersJob
-from indexer.jobs.export_traces_job import ExportTracesJob
-from indexer.jobs.export_transactions_and_logs_job import ExportTransactionsAndLogsJob
-from indexer.jobs.source_job.csv_source_job import CSVSourceJob
-from indexer.jobs.source_job.pg_source_job import PGSourceJob
diff --git a/indexer/jobs/export_transactions_and_logs_job.py b/indexer/jobs/export_transactions_and_logs_job.py
deleted file mode 100644
index 4ded1b6a4..000000000
--- a/indexer/jobs/export_transactions_and_logs_job.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import logging
-from typing import List, Union
-
-import orjson
-
-from indexer.domain.block import Block
-from indexer.domain.log import Log
-from indexer.domain.receipt import Receipt
-from indexer.domain.transaction import Transaction
-from indexer.executors.batch_work_executor import BatchWorkExecutor
-from indexer.jobs.base_job import BaseExportJob, Collector
-from indexer.utils.json_rpc_requests import generate_get_receipt_json_rpc
-from indexer.utils.rpc_utils import rpc_response_batch_to_results
-
-logger = logging.getLogger(__name__)
-
-
-# Exports transactions and logs
-class ExportTransactionsAndLogsJob(BaseExportJob):
- able_to_reorg = True
-
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
-
- self._batch_work_executor = BatchWorkExecutor(
- kwargs["batch_size"],
- kwargs["max_workers"],
- job_name=self.__class__.__name__,
- )
- self._is_batch = kwargs["batch_size"] > 1
-
- def request_for_receipt(self, transactions: List[Transaction], output: Collector):
- transaction_hash_mapper = {transaction.hash: transaction for transaction in transactions}
- results = receipt_rpc_requests(
- self._batch_web3_provider.make_request,
- transaction_hash_mapper.keys(),
- self._is_batch,
- )
-
- for receipt in results:
- transaction = transaction_hash_mapper[receipt["transactionHash"]]
- receipt_entity = Receipt.from_rpc(
- receipt,
- transaction.block_timestamp,
- transaction.block_hash,
- transaction.block_number,
- )
- transaction.fill_with_receipt(receipt_entity)
-
- for log in transaction.receipt.logs:
- output.collect(log)
-
- def _udf(self, blocks: List[Block], output: Collector[Union[Transaction, Log]]):
- transactions: List[Transaction] = [transaction for block in blocks for transaction in block.transactions]
- self._batch_work_executor.execute(
- transactions, self.request_for_receipt, collector=output, total_items=len(transactions)
- )
- self._batch_work_executor.wait()
-
- self._data_buff[Log.type()].sort(key=lambda x: (x.block_number, x.log_index))
-
-
-def receipt_rpc_requests(make_request, transaction_hashes, is_batch):
- receipts_rpc = list(generate_get_receipt_json_rpc(transaction_hashes))
-
- if is_batch:
- response = make_request(params=orjson.dumps(receipts_rpc))
- else:
- response = [make_request(params=orjson.dumps(receipts_rpc[0]))]
-
- results = rpc_response_batch_to_results(response)
- return results
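
For readers unfamiliar with the batching here: receipt_rpc_requests sends one JSON-RPC batch when batch_size > 1 and a single request otherwise. The payload generate_get_receipt_json_rpc presumably yields is the standard eth_getTransactionReceipt batch; the following is an illustration of that shape, not the actual helper:

# Illustration only: the shape of a JSON-RPC batch for receipt fetching.
def make_receipt_batch(transaction_hashes):
    return [
        {
            "jsonrpc": "2.0",
            "method": "eth_getTransactionReceipt",
            "params": [tx_hash],
            "id": i,
        }
        for i, tx_hash in enumerate(transaction_hashes)
    ]
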
diff --git a/indexer/modules/custom/address_index/domain/__init__.py b/indexer/modules/custom/address_index/domain/__init__.py
deleted file mode 100644
index 634cec918..000000000
--- a/indexer/modules/custom/address_index/domain/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-__all__ = [
- "AddressNftTransfer",
- "AddressTokenHolder",
- "AddressTokenTransfer",
- "TokenAddressNftInventory",
- "AddressTransaction",
-]
-
-from indexer.modules.custom.address_index.domain.address_nft_transfer import AddressNftTransfer
-from indexer.modules.custom.address_index.domain.address_token_holder import AddressTokenHolder
-from indexer.modules.custom.address_index.domain.address_token_transfer import AddressTokenTransfer
-from indexer.modules.custom.address_index.domain.address_transaction import AddressTransaction
-from indexer.modules.custom.address_index.domain.token_address_nft_inventory import TokenAddressNftInventory
diff --git a/indexer/modules/custom/address_index/domain/address_contract_operation.py b/indexer/modules/custom/address_index/domain/address_contract_operation.py
deleted file mode 100644
index 020823352..000000000
--- a/indexer/modules/custom/address_index/domain/address_contract_operation.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressContractOperation(Domain):
- address: str
-
- trace_from_address: str
- contract_address: str
-
- trace_id: str
- block_number: int
- transaction_index: int
- transaction_hash: str
- block_timestamp: int
- block_hash: str
-
- error: str
- status: int
-
- creation_code: str
- deployed_code: str
-
- gas: int
- gas_used: int
-
- trace_type: str
- call_type: str
-
- transaction_receipt_status: int
diff --git a/indexer/modules/custom/address_index/domain/address_internal_transaction.py b/indexer/modules/custom/address_index/domain/address_internal_transaction.py
deleted file mode 100644
index 713d8ef7d..000000000
--- a/indexer/modules/custom/address_index/domain/address_internal_transaction.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressInternalTransaction(Domain):
- address: str
-
- trace_id: str
- block_number: int
- transaction_index: int
- transaction_hash: str
- block_timestamp: int
- block_hash: str
-
- error: str
- status: int
-
- input_method: str
-
- value: int
- gas: int
- gas_used: int
-
- trace_type: str
- call_type: str
-
- txn_type: int
- related_address: str
-
- transaction_receipt_status: int
diff --git a/indexer/modules/custom/address_index/domain/address_nft_1155_holders.py b/indexer/modules/custom/address_index/domain/address_nft_1155_holders.py
deleted file mode 100644
index 30eb73fc2..000000000
--- a/indexer/modules/custom/address_index/domain/address_nft_1155_holders.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressNft1155Holder(Domain):
- address: str
- token_address: str
- token_id: int
- balance_of: str
diff --git a/indexer/modules/custom/address_index/domain/address_nft_transfer.py b/indexer/modules/custom/address_index/domain/address_nft_transfer.py
deleted file mode 100644
index 500441712..000000000
--- a/indexer/modules/custom/address_index/domain/address_nft_transfer.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressNftTransfer(Domain):
- address: str
- block_number: int
- log_index: int
- transaction_hash: str
- block_timestamp: int
- block_hash: str
- token_address: str
- related_address: str
- transfer_type: int
- token_id: int
- value: int
diff --git a/indexer/modules/custom/address_index/domain/address_token_holder.py b/indexer/modules/custom/address_index/domain/address_token_holder.py
deleted file mode 100644
index 11f4b7077..000000000
--- a/indexer/modules/custom/address_index/domain/address_token_holder.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressTokenHolder(Domain):
- address: str
- token_address: str
- balance_of: str
diff --git a/indexer/modules/custom/address_index/domain/address_token_transfer.py b/indexer/modules/custom/address_index/domain/address_token_transfer.py
deleted file mode 100644
index edf186a35..000000000
--- a/indexer/modules/custom/address_index/domain/address_token_transfer.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressTokenTransfer(Domain):
- address: str
- block_number: int
- log_index: int
- transaction_hash: str
- block_timestamp: int
- block_hash: str
- token_address: str
- related_address: str
- transfer_type: int
- value: int
diff --git a/indexer/modules/custom/address_index/domain/address_transaction.py b/indexer/modules/custom/address_index/domain/address_transaction.py
deleted file mode 100644
index c7bbc2837..000000000
--- a/indexer/modules/custom/address_index/domain/address_transaction.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class AddressTransaction(Domain):
- address: str
- block_number: int
- transaction_index: int
- transaction_hash: str
- block_timestamp: int
- block_hash: str
- txn_type: int
- related_address: str
- value: int
- transaction_fee: int
- receipt_status: int
- method: str
diff --git a/indexer/modules/custom/address_index/domain/token_address_nft_inventory.py b/indexer/modules/custom/address_index/domain/token_address_nft_inventory.py
deleted file mode 100644
index d7d3c8e69..000000000
--- a/indexer/modules/custom/address_index/domain/token_address_nft_inventory.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class TokenAddressNftInventory(Domain):
- token_address: str
- token_id: int
- wallet_address: str
diff --git a/indexer/modules/custom/deposit_to_l2/domain/address_token_deposit.py b/indexer/modules/custom/deposit_to_l2/domain/address_token_deposit.py
deleted file mode 100644
index ad7574a47..000000000
--- a/indexer/modules/custom/deposit_to_l2/domain/address_token_deposit.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import FilterData
-
-
-@dataclass
-class AddressTokenDeposit(FilterData):
- wallet_address: str
- chain_id: int
- contract_address: str
- token_address: str
- value: int
- block_number: int
- block_timestamp: int
diff --git a/indexer/modules/custom/deposit_to_l2/domain/token_deposit_transaction.py b/indexer/modules/custom/deposit_to_l2/domain/token_deposit_transaction.py
deleted file mode 100644
index fc61bd612..000000000
--- a/indexer/modules/custom/deposit_to_l2/domain/token_deposit_transaction.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import FilterData
-
-
-@dataclass
-class TokenDepositTransaction(FilterData):
- transaction_hash: str
- wallet_address: str
- chain_id: int
- contract_address: str
- token_address: str
- value: int
- block_number: int
- block_timestamp: int
diff --git a/indexer/modules/custom/etherfi/domains/lrts.py b/indexer/modules/custom/etherfi/domains/lrts.py
deleted file mode 100644
index e24701327..000000000
--- a/indexer/modules/custom/etherfi/domains/lrts.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class EtherFiLrtExchangeRateD(Domain):
- exchange_rate: int
- token_address: str
- block_number: int
diff --git a/indexer/modules/custom/hemera_ens/ens_abi.py b/indexer/modules/custom/hemera_ens/ens_abi.py
deleted file mode 100644
index fb514e61e..000000000
--- a/indexer/modules/custom/hemera_ens/ens_abi.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import json
-import os
-
-
-def get_absolute_path(relative_path):
- current_dir = os.path.dirname(os.path.abspath(__file__))
- absolute_path = os.path.join(current_dir, relative_path)
- return absolute_path
-
-
-abi_map = {}
-
-relative_path = "abi"
-absolute_path = get_absolute_path(relative_path)
-fs = os.listdir(absolute_path)
-for a_f in fs:
- with open(os.path.join(absolute_path, a_f), "r") as data_file:
- dic = json.load(data_file)
- abi_map[dic["address"].lower()] = json.dumps(dic["abi"])
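
abi_map above is keyed by lower-cased contract address and stores each ABI re-serialized as a JSON string, so callers decode it before use. A sketch (the address is an example only and resolves only if a matching JSON file exists under abi/):

# Sketch: looking up and decoding an ABI from abi_map as built above.
import json

example_address = "0x00000000000c2e074ec69a0dfb2997ba6c7d2e1e"  # ENS registry, example only
abi = json.loads(abi_map[example_address])  # KeyError if no abi/ file matches
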
diff --git a/indexer/modules/custom/karak/__init__.py b/indexer/modules/custom/karak/__init__.py
deleted file mode 100644
index e873e8e55..000000000
--- a/indexer/modules/custom/karak/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# @Time 2024/9/19 15:18
-# @Author will
-# @File  __init__.py
-# @Brief
-"""Currently, this job only support Deposit, StartWithDraw, FinishWithDraw, more events coming soon"""
diff --git a/indexer/modules/custom/merchant_moe/domains/erc1155_token_holding.py b/indexer/modules/custom/merchant_moe/domains/erc1155_token_holding.py
deleted file mode 100644
index db53a4cf4..000000000
--- a/indexer/modules/custom/merchant_moe/domains/erc1155_token_holding.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain, FilterData
-
-
-@dataclass
-class MerchantMoeErc1155TokenHolding(FilterData):
- position_token_address: str
- wallet_address: str
- token_id: int
- balance: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoeErc1155TokenCurrentHolding(FilterData):
- position_token_address: str
- wallet_address: str
- token_id: int
- balance: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoeErc1155TokenSupply(FilterData):
- position_token_address: str
- token_id: int
- total_supply: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoeErc1155TokenCurrentSupply(FilterData):
- position_token_address: str
- token_id: int
- total_supply: int
- block_number: int
- block_timestamp: int
diff --git a/indexer/modules/custom/merchant_moe/domains/merchant_moe.py b/indexer/modules/custom/merchant_moe/domains/merchant_moe.py
deleted file mode 100644
index a1cc81a22..000000000
--- a/indexer/modules/custom/merchant_moe/domains/merchant_moe.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain, FilterData
-
-
-@dataclass
-class MerchantMoeTokenBin(FilterData):
- position_token_address: str
- token_id: int
- reserve0_bin: int
- reserve1_bin: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoeTokenCurrentBin(FilterData):
- position_token_address: str
- token_id: int
- reserve0_bin: int
- reserve1_bin: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoePool(FilterData):
- position_token_address: str
- token0_address: str
- token1_address: str
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoePoolRecord(FilterData):
- pool_address: str
- active_id: int
- bin_step: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class MerchantMoePoolCurrentStatus(FilterData):
- pool_address: str
- active_id: int
- bin_step: int
- block_number: int
- block_timestamp: int
diff --git a/indexer/modules/custom/opensea/domain/opensea_order.py b/indexer/modules/custom/opensea/domain/opensea_order.py
deleted file mode 100644
index 4caee89fb..000000000
--- a/indexer/modules/custom/opensea/domain/opensea_order.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import Domain
-
-
-@dataclass
-class OpenseaOrder(Domain):
- order_hash: str
- zone: str
- offerer: str
- recipient: str
- offer: dict
- consideration: dict
- block_timestamp: int
- block_hash: str
- transaction_hash: str
- log_index: int
- block_number: int
-
- protocol_version: str = "1.6"
diff --git a/indexer/modules/custom/staking_fbtc/__init__.py b/indexer/modules/custom/staking_fbtc/__init__.py
deleted file mode 100644
index 57901b774..000000000
--- a/indexer/modules/custom/staking_fbtc/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""
-Indexes not only FBTC, but also cmETH and other tokens.
-"""
diff --git a/indexer/modules/custom/staking_fbtc/domain/af_staked_transferred_balance.py b/indexer/modules/custom/staking_fbtc/domain/af_staked_transferred_balance.py
deleted file mode 100644
index ac58f30f8..000000000
--- a/indexer/modules/custom/staking_fbtc/domain/af_staked_transferred_balance.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import FilterData
-
-
-# records for all tokens
-@dataclass
-class AfStakedTransferredBalanceHistDomain(FilterData):
- contract_address: str
- protocol_id: str
- wallet_address: str
- token_address: str
- block_transfer_value: int
- block_cumulative_value: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class AfStakedTransferredBalanceCurrentDomain(FilterData):
- contract_address: str
- protocol_id: str
- wallet_address: str
- token_address: str
- block_transfer_value: int
- block_cumulative_value: int
- block_number: int
- block_timestamp: int
diff --git a/indexer/modules/custom/staking_fbtc/domain/feature_staked_fbtc_detail.py b/indexer/modules/custom/staking_fbtc/domain/feature_staked_fbtc_detail.py
deleted file mode 100644
index 148c88f89..000000000
--- a/indexer/modules/custom/staking_fbtc/domain/feature_staked_fbtc_detail.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from dataclasses import dataclass
-
-from indexer.domain import FilterData
-
-
-@dataclass
-class StakedFBTCDetail(FilterData):
- vault_address: str
- protocol_id: str
- wallet_address: str
- amount: int
- changed_amount: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class StakedFBTCCurrentStatus(FilterData):
- vault_address: str
- protocol_id: str
- wallet_address: str
- amount: int
- changed_amount: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class TransferredFBTCDetail(FilterData):
- vault_address: str
- protocol_id: str
- wallet_address: str
- amount: int
- changed_amount: int
- block_number: int
- block_timestamp: int
-
-
-@dataclass
-class TransferredFBTCCurrentStatus(FilterData):
- vault_address: str
- protocol_id: str
- wallet_address: str
- amount: int
- changed_amount: int
- block_number: int
- block_timestamp: int
diff --git a/indexer/modules/custom/uniswap_v2/uniswapv2_swap_event.py b/indexer/modules/custom/uniswap_v2/uniswapv2_swap_event.py
deleted file mode 100644
index f152e9531..000000000
--- a/indexer/modules/custom/uniswap_v2/uniswapv2_swap_event.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import logging
-
-from indexer.domain.log import Log
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.uniswap_v2.aerodrome_abi import SWAP_EVENT as AERODROME_SWAP_EVENT
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import UniswapV2SwapEvent
-from indexer.modules.custom.uniswap_v2.uniswapv2_abi import SWAP_EVENT as UNISWAPV2_SWAP_EVENT
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-
-logger = logging.getLogger(__name__)
-
-
-class ExportUniSwapV2SwapEventJob(FilterTransactionDataJob):
- dependency_types = [Log]
- output_types = [UniswapV2SwapEvent]
-
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
-
- def get_filter(self):
- return TransactionFilterByLogs(
- [
- TopicSpecification(topics=[UNISWAPV2_SWAP_EVENT.get_signature(), AERODROME_SWAP_EVENT.get_signature()]),
- ]
- )
-
- def _process(self, **kwargs):
- logs = self._data_buff[Log.type()]
- for log in logs:
- swap_event = None
-
- if log.topic0 == UNISWAPV2_SWAP_EVENT.get_signature():
- decoded_dict = UNISWAPV2_SWAP_EVENT.decode_log(log)
- swap_event = UniswapV2SwapEvent(
- pool_address=log.address,
- sender=decoded_dict["sender"],
- to_address=decoded_dict["to"],
- amount0_in=decoded_dict["amount0In"],
- amount1_in=decoded_dict["amount1In"],
- amount0_out=decoded_dict["amount0Out"],
- amount1_out=decoded_dict["amount1Out"],
- block_number=log.block_number,
- block_timestamp=log.block_timestamp,
- transaction_hash=log.transaction_hash,
- log_index=log.log_index,
- )
- elif log.topic0 == AERODROME_SWAP_EVENT.get_signature():
- decoded_dict = AERODROME_SWAP_EVENT.decode_log(log)
- swap_event = UniswapV2SwapEvent(
- pool_address=log.address,
- sender=decoded_dict["sender"],
- to_address=decoded_dict["to"],
- amount0_in=decoded_dict["amount0In"],
- amount1_in=decoded_dict["amount1In"],
- amount0_out=decoded_dict["amount0Out"],
- amount1_out=decoded_dict["amount1Out"],
- block_number=log.block_number,
- block_timestamp=log.block_timestamp,
- transaction_hash=log.transaction_hash,
- log_index=log.log_index,
- )
-
- if swap_event:
- self._collect_domain(swap_event)
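
The two branches in _process above are identical apart from which ABI event decodes the log; a sketch of a table-driven variant that removes the duplication (same names as the deleted module):

# Sketch: a signature -> event table replaces the duplicated branches.
SWAP_EVENTS = {
    UNISWAPV2_SWAP_EVENT.get_signature(): UNISWAPV2_SWAP_EVENT,
    AERODROME_SWAP_EVENT.get_signature(): AERODROME_SWAP_EVENT,
}

def decode_swap(log):
    event = SWAP_EVENTS.get(log.topic0)
    if event is None:
        return None
    decoded = event.decode_log(log)
    return UniswapV2SwapEvent(
        pool_address=log.address,
        sender=decoded["sender"],
        to_address=decoded["to"],
        amount0_in=decoded["amount0In"],
        amount1_in=decoded["amount1In"],
        amount0_out=decoded["amount0Out"],
        amount1_out=decoded["amount1Out"],
        block_number=log.block_number,
        block_timestamp=log.block_timestamp,
        transaction_hash=log.transaction_hash,
        log_index=log.log_index,
    )
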
diff --git a/indexer/modules/custom/uniswap_v2/uniswapv2_total_supply.py b/indexer/modules/custom/uniswap_v2/uniswapv2_total_supply.py
deleted file mode 100644
index 75e618ef9..000000000
--- a/indexer/modules/custom/uniswap_v2/uniswapv2_total_supply.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import logging
-
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.token_transfer import ERC20TokenTransfer
-from indexer.jobs.base_job import ExtensionJob
-from indexer.modules.custom.uniswap_v2.domain.feature_uniswap_v2 import (
- UniswapV2Erc20CurrentTotalSupply,
- UniswapV2Erc20TotalSupply,
- UniswapV2Pool,
-)
-from indexer.modules.custom.uniswap_v2.models.feature_uniswap_v2_pools import UniswapV2Pools
-from indexer.utils.abi_setting import TOKEN_TOTAL_SUPPLY_FUNCTION
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
-
-logger = logging.getLogger(__name__)
-
-
-class ExportUniswapV2TotalSupplyJob(ExtensionJob):
- dependency_types = [ERC20TokenTransfer, UniswapV2Pool]
-
- output_types = [UniswapV2Erc20TotalSupply, UniswapV2Erc20CurrentTotalSupply]
- able_to_reorg = True
-
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
- self.multi_call_helper = MultiCallHelper(self._web3, kwargs, logger)
-
- def _process(self, **kwargs):
- existing_pools = self.get_existing_pools()
-
- erc_20_token_transfers = self._data_buff[ERC20TokenTransfer.type()]
- uniswapv2_pool_token_transfers = [tt for tt in erc_20_token_transfers if tt.token_address in existing_pools]
-
- call_dict = {}
- for token_transfer in uniswapv2_pool_token_transfers:
- token_address = token_transfer.token_address
- block_number = token_transfer.block_number
- call = Call(
- target=token_address,
- function_abi=TOKEN_TOTAL_SUPPLY_FUNCTION,
- block_number=block_number,
- user_defined_k=token_transfer.block_timestamp,
- )
- call_dict[token_address, block_number] = call
-
- call_list = list(call_dict.values())
-
- self.multi_call_helper.execute_calls(call_list)
-
- records = []
- current_dict = {}
-
- call_list.sort(key=lambda call: call.block_number)
-
- for call in call_list:
- total_supply = call.returns.get("totalSupply")
-
- token_address = call.target.lower()
- erc_total_supply = UniswapV2Erc20TotalSupply(
- token_address=token_address,
- total_supply=total_supply,
- block_number=call.block_number,
- block_timestamp=call.user_defined_k,
- )
-
- current_dict[token_address] = UniswapV2Erc20CurrentTotalSupply(**vars(erc_total_supply))
- records.append(erc_total_supply)
- self._collect_domains(records)
- self._collect_domains(current_dict.values())
-
- def get_existing_pools(self):
- session = self._service.Session()
- try:
- pools_orm = session.query(UniswapV2Pools).all()
- existing_pools = [bytes_to_hex_str(p.pool_address) for p in pools_orm]
-
- except Exception as e:
-            logger.exception(e)
-            raise
- finally:
- session.close()
-
- return existing_pools
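
The call_dict keyed by (token_address, block_number) above is doing quiet deduplication: however many transfers touch the same pool token within one block, only a single totalSupply() call is issued. The same idea in isolation (illustrative names):

# Sketch of the deduplication step: one RPC call per (token, block) pair.
def dedup_call_keys(transfers):
    keys = {}
    for t in transfers:
        keys[(t.token_address, t.block_number)] = t  # last write wins
    return list(keys.values())
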
diff --git a/indexer/modules/custom/uniswap_v3/constants.py b/indexer/modules/custom/uniswap_v3/constants.py
deleted file mode 100644
index 7bc131ec3..000000000
--- a/indexer/modules/custom/uniswap_v3/constants.py
+++ /dev/null
@@ -1 +0,0 @@
-ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
diff --git a/indexer/modules/custom/uniswap_v3/uniswap_v3_pool_job.py b/indexer/modules/custom/uniswap_v3/uniswap_v3_pool_job.py
deleted file mode 100644
index 12249da94..000000000
--- a/indexer/modules/custom/uniswap_v3/uniswap_v3_pool_job.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import logging
-
-import indexer.modules.custom.uniswap_v3.swapsicle_abi as swapsicle_abi
-import indexer.modules.custom.uniswap_v3.uniswapv3_abi as uniswapv3_abi
-from common.utils.format_utils import bytes_to_hex_str
-from indexer.domain.transaction import Transaction
-from indexer.jobs import FilterTransactionDataJob
-from indexer.modules.custom.uniswap_v3.domains.feature_uniswap_v3 import UniswapV3Pool
-from indexer.modules.custom.uniswap_v3.models.feature_uniswap_v3_pools import UniswapV3Pools
-from indexer.modules.custom.uniswap_v3.util import AddressManager
-from indexer.specification.specification import TopicSpecification, TransactionFilterByLogs
-
-logger = logging.getLogger(__name__)
-
-
-class ExportUniSwapV3PoolJob(FilterTransactionDataJob):
- dependency_types = [Transaction]
- output_types = [UniswapV3Pool]
- able_to_reorg = True
-
- def __init__(self, **kwargs):
- super().__init__(**kwargs)
- self._service = kwargs["config"].get("db_service")
- config = kwargs["config"]["uniswap_v3_job"]
- jobs = config.get("jobs", [])
- self._address_manager = AddressManager(jobs)
- self._existing_pools = self.get_existing_pools()
-
- def get_filter(self):
-
- return TransactionFilterByLogs(
- [
- TopicSpecification(
- topics=[
- abi_module.POOL_CREATED_EVENT.get_signature()
- for abi_module in self._address_manager.abi_modules_list
- ],
- addresses=self._address_manager.factory_address_list,
- ),
- ]
- )
-
- def _process(self, **kwargs):
- self.get_pools()
-
- def get_pools(self):
- transactions = self._data_buff["transaction"]
- for transaction in transactions:
- logs = transaction.receipt.logs
- for log in logs:
- if log.topic0 == swapsicle_abi.POOL_CREATED_EVENT.get_signature():
- pool_dict = {}
- decoded_data = swapsicle_abi.POOL_CREATED_EVENT.decode_log(log)
- pool_address = decoded_data["pool"]
-                    # tick_spacing/fee are stored in other logs
- pool_dict.update(
- {
- "factory_address": log.address,
- "position_token_address": transaction.to_address,
- "token0_address": decoded_data["token0"],
- "token1_address": decoded_data["token1"],
- "pool_address": pool_address,
- "block_number": log.block_number,
- "block_timestamp": log.block_timestamp,
- "fee": 0,
- "tick_spacing": 0,
- }
- )
- if pool_address not in self._existing_pools:
- self._existing_pools.append(pool_address)
- uniswap_v3_pool = UniswapV3Pool(**pool_dict)
- self._collect_domain(uniswap_v3_pool)
-
- elif log.topic0 == uniswapv3_abi.POOL_CREATED_EVENT.get_signature():
- pool_dict = {}
- decoded_data = uniswapv3_abi.POOL_CREATED_EVENT.decode_log(log)
- pool_address = decoded_data["pool"]
- pool_dict.update(
- {
- "factory_address": log.address,
- "position_token_address": transaction.to_address,
- "token0_address": decoded_data["token0"],
- "token1_address": decoded_data["token1"],
- "fee": decoded_data["fee"],
- "tick_spacing": decoded_data["tickSpacing"],
- "pool_address": pool_address,
- "block_number": log.block_number,
- "block_timestamp": log.block_timestamp,
- }
- )
- if pool_address not in self._existing_pools:
- self._existing_pools.append(pool_address)
- uniswap_v3_pool = UniswapV3Pool(**pool_dict)
- self._collect_domain(uniswap_v3_pool)
-
- def get_existing_pools(self):
- session = self._service.Session()
- try:
- pools_orm = session.query(UniswapV3Pools).all()
- existing_pools = [bytes_to_hex_str(p.pool_address) for p in pools_orm]
-
- except Exception as e:
-            logger.exception(e)
-            raise
- finally:
- session.close()
-
- return existing_pools
diff --git a/indexer/tests/jobs/test_export_transactions_job.py b/indexer/tests/jobs/test_export_transactions_job.py
deleted file mode 100644
index 13342b869..000000000
--- a/indexer/tests/jobs/test_export_transactions_job.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import pytest
-
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.log import Log
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
-
-
-@pytest.mark.indexer
-@pytest.mark.indexer_exporter
-@pytest.mark.serial
-def test_export_transaction_job():
- job_scheduler = JobScheduler(
- batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
- batch_web3_debug_provider=ThreadLocalProxy(
- lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
- ),
- item_exporters=[ConsoleItemExporter()],
- batch_size=100,
- debug_batch_size=1,
- max_workers=5,
- config={},
- required_output_types=[Log],
- )
-
- job_scheduler.run_jobs(
- start_block=20273057,
- end_block=20273058,
- )
-
- job_scheduler.clear_data_buff()
diff --git a/indexer/utils/multicall_hemera/__init__.py b/indexer/utils/multicall_hemera/__init__.py
deleted file mode 100644
index 370817634..000000000
--- a/indexer/utils/multicall_hemera/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from indexer.utils.multicall_hemera.call import Call
-from indexer.utils.multicall_hemera.multi_call import Multicall
-
-"""
-This package provides an API for the Multicall3 smart contract.
-"""
diff --git a/indexer/utils/sync_recorder.py b/indexer/utils/sync_recorder.py
deleted file mode 100644
index 57189ae4c..000000000
--- a/indexer/utils/sync_recorder.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import os
-from datetime import datetime, timezone
-
-from sqlalchemy import func
-from sqlalchemy.dialects.postgresql import insert
-
-from common.models.sync_record import SyncRecord
-from common.utils.file_utils import smart_open, write_to_file
-
-
-class BaseRecorder(object):
- def set_last_synced_block(self, last_synced_block):
- pass
-
- def get_last_synced_block(self):
- pass
-
-
-class FileSyncRecorder(BaseRecorder):
-
- def __init__(self, file_name):
- self.file_name = file_name
-
- def set_last_synced_block(self, last_synced_block):
- write_to_file(self.file_name, str(last_synced_block) + "\n")
-
- def get_last_synced_block(self):
- if not os.path.isfile(self.file_name):
- self.set_last_synced_block(0)
- return 0
- with smart_open(self.file_name, "r") as last_synced_block_file:
- return int(last_synced_block_file.read())
-
-
-class PGSyncRecorder(BaseRecorder):
-
- def __init__(self, key, service):
- self.key = key
- self.service = service
-
- def set_last_synced_block(self, last_synced_block):
- session = self.service.get_service_session()
- update_time = func.to_timestamp(int(datetime.now(timezone.utc).timestamp()))
- try:
- statement = (
- insert(SyncRecord)
- .values(
- {
- "mission_sign": self.key,
- "last_block_number": last_synced_block,
- "update_time": update_time,
- }
- )
- .on_conflict_do_update(
- index_elements=[SyncRecord.mission_sign],
- set_={
- "last_block_number": last_synced_block,
- "update_time": update_time,
- },
- )
- )
- session.execute(statement)
- session.commit()
- except Exception as e:
- raise e
- finally:
- session.close()
-
- def get_last_synced_block(self):
- session = self.service.get_service_session()
- try:
- result = session.query(SyncRecord.last_block_number).filter(SyncRecord.mission_sign == self.key).scalar()
- except Exception as e:
- raise e
- finally:
- session.close()
- if result is not None:
- return result
- return 0
-
-
-def create_recorder(sync_recorder: str, config: dict) -> BaseRecorder:
- recorder_sign = sync_recorder.find(":")
- if recorder_sign == -1:
-        raise ValueError(f"Invalid sync recorder: {sync_recorder}")
-
- recorder = sync_recorder.split(":")
-
- if recorder[0] == "pg":
- try:
- service = config["db_service"]
- except KeyError:
-            raise ValueError("postgresql sync recorder requires 'db_service' in config.")
- return PGSyncRecorder(recorder[1], service)
-
- elif recorder[0] == "file":
- return FileSyncRecorder(recorder[1])
-
- else:
- raise ValueError("Unable to determine sync recorder type: " + sync_recorder)
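
create_recorder above expects a "<type>:<key-or-path>" selector; a usage sketch, where db_service stands in for a service exposing get_service_session() as used by PGSyncRecorder:

# Usage of create_recorder as defined above.
file_recorder = create_recorder("file:sync_record", config={})
file_recorder.set_last_synced_block(20_273_058)
assert file_recorder.get_last_synced_block() == 20_273_058

# The "pg" flavor needs a db_service exposing get_service_session():
# pg_recorder = create_recorder("pg:indexer_mission", config={"db_service": db_service})
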
diff --git a/poetry.lock b/poetry.lock
index 26cab7040..085adfab4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -160,13 +160,13 @@ tz = ["backports.zoneinfo"]
[[package]]
name = "aniso8601"
-version = "9.0.1"
+version = "10.0.0"
description = "A library for parsing ISO 8601 strings."
optional = false
python-versions = "*"
files = [
- {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"},
- {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
+ {file = "aniso8601-10.0.0-py2.py3-none-any.whl", hash = "sha256:3c943422efaa0229ebd2b0d7d223effb5e7c89e24d2267ebe76c61a2d8e290cb"},
+ {file = "aniso8601-10.0.0.tar.gz", hash = "sha256:ff1d0fc2346688c62c0151547136ac30e322896ed8af316ef7602c47da9426cf"},
]
[package.extras]
@@ -185,19 +185,19 @@ files = [
[[package]]
name = "attrs"
-version = "24.2.0"
+version = "25.1.0"
description = "Classes Without Boilerplate"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
- {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
+ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"},
+ {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"},
]
[package.extras]
benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
@@ -388,6 +388,17 @@ files = [
{file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"},
]
+[[package]]
+name = "cached-property"
+version = "2.0.1"
+description = "A decorator for caching properties in classes."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "cached_property-2.0.1-py3-none-any.whl", hash = "sha256:f617d70ab1100b7bcf6e42228f9ddcb78c676ffa167278d9f730d1c2fba69ccb"},
+ {file = "cached_property-2.0.1.tar.gz", hash = "sha256:484d617105e3ee0e4f1f58725e72a8ef9e93deee462222dbd51cd91230897641"},
+]
+
[[package]]
name = "cachelib"
version = "0.9.0"
@@ -401,127 +412,114 @@ files = [
[[package]]
name = "certifi"
-version = "2024.8.30"
+version = "2025.1.31"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
- {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
+ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
+ {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
]
[[package]]
name = "charset-normalizer"
-version = "3.4.0"
+version = "3.4.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
- {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
- {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
- {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
- {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
- {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
- {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
- {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
- {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
- {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
+python-versions = ">=3.7"
+files = [
+ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"},
+ {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"},
+ {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"},
+ {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"},
+ {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"},
+ {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"},
+ {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"},
+ {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"},
+ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"},
+ {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"},
]
[[package]]
@@ -620,13 +618,13 @@ files = [
[[package]]
name = "click"
-version = "8.1.7"
+version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
files = [
- {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
- {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
+ {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
]
[package.dependencies]
@@ -732,97 +730,111 @@ toml = ["tomli"]
[[package]]
name = "cytoolz"
-version = "1.0.0"
+version = "1.0.1"
description = "Cython implementation of Toolz: High performance functional utilities"
optional = false
python-versions = ">=3.8"
files = [
- {file = "cytoolz-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ecf5a887acb8f079ab1b81612b1c889bcbe6611aa7804fd2df46ed310aa5a345"},
- {file = "cytoolz-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0ef30c1e091d4d59d14d8108a16d50bd227be5d52a47da891da5019ac2f8e4"},
- {file = "cytoolz-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7df2dfd679f0517a96ced1cdd22f5c6c6aeeed28d928a82a02bf4c3fd6fd7ac4"},
- {file = "cytoolz-1.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c51452c938e610f57551aa96e34924169c9100c0448bac88c2fb395cbd3538c"},
- {file = "cytoolz-1.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6433f03910c5e5345d82d6299457c26bf33821224ebb837c6b09d9cdbc414a6c"},
- {file = "cytoolz-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:389ec328bb535f09e71dfe658bf0041f17194ca4cedaacd39bafe7893497a819"},
- {file = "cytoolz-1.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64658e1209517ce4b54c1c9269a508b289d8d55fc742760e4b8579eacf09a33"},
- {file = "cytoolz-1.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f6039a9bd5bb988762458b9ca82b39e60ca5e5baae2ba93913990dcc5d19fa88"},
- {file = "cytoolz-1.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:85c9c8c4465ed1b2c8d67003809aec9627b129cb531d2f6cf0bbfe39952e7e4d"},
- {file = "cytoolz-1.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:49375aad431d76650f94877afb92f09f58b6ff9055079ef4f2cd55313f5a1b39"},
- {file = "cytoolz-1.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4c45106171c824a61e755355520b646cb35a1987b34bbf5789443823ee137f63"},
- {file = "cytoolz-1.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3b319a7f0fed5db07d189db4046162ebc183c108df3562a65ba6ebe862d1f634"},
- {file = "cytoolz-1.0.0-cp310-cp310-win32.whl", hash = "sha256:9770e1b09748ad0d751853d994991e2592a9f8c464a87014365f80dac2e83faa"},
- {file = "cytoolz-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:20194dd02954c00c1f0755e636be75a20781f91a4ac9270c7f747e82d3c7f5a5"},
- {file = "cytoolz-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dffc22fd2c91be64dbdbc462d0786f8e8ac9a275cfa1869a1084d1867d4f67e0"},
- {file = "cytoolz-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a99e7e29274e293f4ffe20e07f76c2ac753a78f1b40c1828dfc54b2981b2f6c4"},
- {file = "cytoolz-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c507a3e0a45c41d66b43f96797290d75d1e7a8549aa03a4a6b8854fdf3f7b8d8"},
- {file = "cytoolz-1.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:643a593ec272ef7429099e1182a22f64ec2696c00d295d2a5be390db1b7ff176"},
- {file = "cytoolz-1.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ce38e2e42cbae30446190c59b92a8a9029e1806fd79eaf88f48b0fe33003893"},
- {file = "cytoolz-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:810a6a168b8c5ecb412fbae3dd6f7ed6c6253a63caf4174ee9794ebd29b2224f"},
- {file = "cytoolz-1.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ce8a2a85c0741c1b19b16e6782c4a5abc54c3caecda66793447112ab2fa9884"},
- {file = "cytoolz-1.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ea4ac72e6b830861035c4c7999af8e55813f57c6d1913a3d93cc4a6babc27bf7"},
- {file = "cytoolz-1.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a09cdfb21dfb38aa04df43e7546a41f673377eb5485da88ceb784e327ec7603b"},
- {file = "cytoolz-1.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:658dd85deb375ff7af990a674e5c9058cef1c9d1f5dc89bc87b77be499348144"},
- {file = "cytoolz-1.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9715d1ff5576919d10b68f17241375f6a1eec8961c25b78a83e6ef1487053f39"},
- {file = "cytoolz-1.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f370a1f1f1afc5c1c8cc5edc1cfe0ba444263a0772af7ce094be8e734f41769d"},
- {file = "cytoolz-1.0.0-cp311-cp311-win32.whl", hash = "sha256:dbb2ec1177dca700f3db2127e572da20de280c214fc587b2a11c717fc421af56"},
- {file = "cytoolz-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:0983eee73df86e54bb4a79fcc4996aa8b8368fdbf43897f02f9c3bf39c4dc4fb"},
- {file = "cytoolz-1.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:10e3986066dc379e30e225b230754d9f5996aa8d84c2accc69c473c21d261e46"},
- {file = "cytoolz-1.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:16576f1bb143ee2cb9f719fcc4b845879fb121f9075c7c5e8a5ff4854bd02fc6"},
- {file = "cytoolz-1.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3faa25a1840b984315e8b3ae517312375f4273ffc9a2f035f548b7f916884f37"},
- {file = "cytoolz-1.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781fce70a277b20fd95dc66811d1a97bb07b611ceea9bda8b7dd3c6a4b05d59a"},
- {file = "cytoolz-1.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a562c25338eb24d419d1e80a7ae12133844ce6fdeb4ab54459daf250088a1b2"},
- {file = "cytoolz-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f29d8330aaf070304f7cd5cb7e73e198753624eb0aec278557cccd460c699b5b"},
- {file = "cytoolz-1.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98a96c54aa55ed9c7cdb23c2f0df39a7b4ee518ac54888480b5bdb5ef69c7ef0"},
- {file = "cytoolz-1.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:287d6d7f475882c2ddcbedf8da9a9b37d85b77690779a2d1cdceb5ae3998d52e"},
- {file = "cytoolz-1.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:05a871688df749b982839239fcd3f8ec3b3b4853775d575ff9cd335fa7c75035"},
- {file = "cytoolz-1.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:28bb88e1e2f7d6d4b8e0890b06d292c568984d717de3e8381f2ca1dd12af6470"},
- {file = "cytoolz-1.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:576a4f1fc73d8836b10458b583f915849da6e4f7914f4ecb623ad95c2508cad5"},
- {file = "cytoolz-1.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:509ed3799c47e4ada14f63e41e8f540ac6e2dab97d5d7298934e6abb9d3830ec"},
- {file = "cytoolz-1.0.0-cp312-cp312-win32.whl", hash = "sha256:9ce25f02b910630f6dc2540dd1e26c9326027ddde6c59f8cab07c56acc70714c"},
- {file = "cytoolz-1.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:7e53cfcce87e05b7f0ae2fb2b3e5820048cd0bb7b701e92bd8f75c9fbb7c9ae9"},
- {file = "cytoolz-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7d56569dfe67a39ce74ffff0dc12cf0a3d1aae709667a303fe8f2dd5fd004fdf"},
- {file = "cytoolz-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:035c8bb4706dcf93a89fb35feadff67e9301935bf6bb864cd2366923b69d9a29"},
- {file = "cytoolz-1.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27c684799708bdc7ee7acfaf464836e1b4dec0996815c1d5efd6a92a4356a562"},
- {file = "cytoolz-1.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44ab57cfc922b15d94899f980d76759ef9e0256912dfab70bf2561bea9cd5b19"},
- {file = "cytoolz-1.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:478af5ecc066da093d7660b23d0b465a7f44179739937afbded8af00af412eb6"},
- {file = "cytoolz-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da1f82a7828a42468ea2820a25b6e56461361390c29dcd4d68beccfa1b71066b"},
- {file = "cytoolz-1.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c371b3114d38ee717780b239179e88d5d358fe759a00dcf07691b8922bbc762"},
- {file = "cytoolz-1.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:90b343b2f3b3e77c3832ba19b0b17e95412a5b2e715b05c23a55ba525d1fca49"},
- {file = "cytoolz-1.0.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89a554a9ba112403232a54e15e46ff218b33020f3f45c4baf6520ab198b7ad93"},
- {file = "cytoolz-1.0.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:0d603f5e2b1072166745ecdd81384a75757a96a704a5642231eb51969f919d5f"},
- {file = "cytoolz-1.0.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:122ef2425bd3c0419e6e5260d0b18cd25cf74de589cd0184e4a63b24a4641e2e"},
- {file = "cytoolz-1.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8819f1f97ebe36efcaf4b550e21677c46ac8a41bed482cf66845f377dd20700d"},
- {file = "cytoolz-1.0.0-cp38-cp38-win32.whl", hash = "sha256:fcddbb853770dd6e270d89ea8742f0aa42c255a274b9e1620eb04e019b79785e"},
- {file = "cytoolz-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:ca526905a014a38cc23ae78635dc51d0462c5c24425b22c08beed9ff2ee03845"},
- {file = "cytoolz-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:05df5ff1cdd198fb57e7368623662578c950be0b14883cadfb9ee4098415e1e5"},
- {file = "cytoolz-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04a84778f48ebddb26948971dc60948907c876ba33b13f9cbb014fe65b341fc2"},
- {file = "cytoolz-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f65283b618b4c4df759f57bcf8483865a73f7f268e6d76886c743407c8d26c1c"},
- {file = "cytoolz-1.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388cd07ee9a9e504c735a0a933e53c98586a1c301a64af81f7aa7ff40c747520"},
- {file = "cytoolz-1.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06d09e9569cfdfc5c082806d4b4582db8023a3ce034097008622bcbac7236f38"},
- {file = "cytoolz-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9502bd9e37779cc9893cbab515a474c2ab6af61ed22ac2f7e16033db18fcaa85"},
- {file = "cytoolz-1.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:364c2fda148def38003b2c86e8adde1d2aab12411dd50872c244a815262e2fda"},
- {file = "cytoolz-1.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b2e945617325242687189966335e785dc0fae316f4c1825baacf56e5a97e65f"},
- {file = "cytoolz-1.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0f16907fdc724c55b16776bdb7e629deae81d500fe48cfc3861231753b271355"},
- {file = "cytoolz-1.0.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d3206c81ca3ba2d7b8fe78f2e116e3028e721148be753308e88dcbbc370bca52"},
- {file = "cytoolz-1.0.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:becce4b13e110b5ac6b23753dcd0c977f4fdccffa31898296e13fd1109e517e3"},
- {file = "cytoolz-1.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69a7e5e98fd446079b8b8ec5987aec9a31ec3570a6f494baefa6800b783eaf22"},
- {file = "cytoolz-1.0.0-cp39-cp39-win32.whl", hash = "sha256:b1707b6c3a91676ac83a28a231a14b337dbb4436b937e6b3e4fd44209852a48b"},
- {file = "cytoolz-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:11d48b8521ef5fe92e099f4fc00717b5d0789c3c90d5d84031b6d3b17dee1700"},
- {file = "cytoolz-1.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e672712d5dc3094afc6fb346dd4e9c18c1f3c69608ddb8cf3b9f8428f9c26a5c"},
- {file = "cytoolz-1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86fb208bfb7420e1d0d20065d661310e4a8a6884851d4044f47d37ed4cd7410e"},
- {file = "cytoolz-1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dbe5fe3b835859fc559eb59bf2775b5a108f7f2cfab0966f3202859d787d8fd"},
- {file = "cytoolz-1.0.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cace092dfda174eed09ed871793beb5b65633963bcda5b1632c73a5aceea1ce"},
- {file = "cytoolz-1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f7a9d816af3be9725c70efe0a6e4352a45d3877751b395014b8eb2f79d7d8d9d"},
- {file = "cytoolz-1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:caa7ef840847a23b379e6146760e3a22f15f445656af97e55a435c592125cfa5"},
- {file = "cytoolz-1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:921082fff09ff6e40c12c87b49be044492b2d6bb01d47783995813b76680c7b2"},
- {file = "cytoolz-1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a32f1356f3b64dda883583383966948604ac69ca0b7fbcf5f28856e5f9133b4e"},
- {file = "cytoolz-1.0.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9af793b1738e4191d15a92e1793f1ffea9f6461022c7b2442f3cb1ea0a4f758a"},
- {file = "cytoolz-1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:51dfda3983fcc59075c534ce54ca041bb3c80e827ada5d4f25ff7b4049777f94"},
- {file = "cytoolz-1.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:acfb8780c04d29423d14aaab74cd1b7b4beaba32f676e7ace02c9acfbf532aba"},
- {file = "cytoolz-1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99f39dcc46416dca3eb23664b73187b77fb52cd8ba2ddd8020a292d8f449db67"},
- {file = "cytoolz-1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0d56b3721977806dcf1a68b0ecd56feb382fdb0f632af1a9fc5ab9b662b32c6"},
- {file = "cytoolz-1.0.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d346620abc8c83ae634136e700432ad6202faffcc24c5ab70b87392dcda8a1"},
- {file = "cytoolz-1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:df0c81197fc130de94c09fc6f024a6a19c98ba8fe55c17f1e45ebba2e9229079"},
- {file = "cytoolz-1.0.0.tar.gz", hash = "sha256:eb453b30182152f9917a5189b7d99046b6ce90cdf8aeb0feff4b2683e600defd"},
+ {file = "cytoolz-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cec9af61f71fc3853eb5dca3d42eb07d1f48a4599fa502cbe92adde85f74b042"},
+ {file = "cytoolz-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:140bbd649dbda01e91add7642149a5987a7c3ccc251f2263de894b89f50b6608"},
+ {file = "cytoolz-1.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e90124bdc42ff58b88cdea1d24a6bc5f776414a314cc4d94f25c88badb3a16d1"},
+ {file = "cytoolz-1.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e74801b751e28f7c5cc3ad264c123954a051f546f2fdfe089f5aa7a12ccfa6da"},
+ {file = "cytoolz-1.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:582dad4545ddfb5127494ef23f3fa4855f1673a35d50c66f7638e9fb49805089"},
+ {file = "cytoolz-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7bd0618e16efe03bd12f19c2a26a27e6e6b75d7105adb7be1cd2a53fa755d8"},
+ {file = "cytoolz-1.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d74cca6acf1c4af58b2e4a89cc565ed61c5e201de2e434748c93e5a0f5c541a5"},
+ {file = "cytoolz-1.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:823a3763828d8d457f542b2a45d75d6b4ced5e470b5c7cf2ed66a02f508ed442"},
+ {file = "cytoolz-1.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:51633a14e6844c61db1d68c1ffd077cf949f5c99c60ed5f1e265b9e2966f1b52"},
+ {file = "cytoolz-1.0.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f3ec9b01c45348f1d0d712507d54c2bfd69c62fbd7c9ef555c9d8298693c2432"},
+ {file = "cytoolz-1.0.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1855022b712a9c7a5bce354517ab4727a38095f81e2d23d3eabaf1daeb6a3b3c"},
+ {file = "cytoolz-1.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9930f7288c4866a1dc1cc87174f0c6ff4cad1671eb1f6306808aa6c445857d78"},
+ {file = "cytoolz-1.0.1-cp310-cp310-win32.whl", hash = "sha256:a9baad795d72fadc3445ccd0f122abfdbdf94269157e6d6d4835636dad318804"},
+ {file = "cytoolz-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:ad95b386a84e18e1f6136f6d343d2509d4c3aae9f5a536f3dc96808fcc56a8cf"},
+ {file = "cytoolz-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d958d4f04d9d7018e5c1850790d9d8e68b31c9a2deebca74b903706fdddd2b6"},
+ {file = "cytoolz-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0f445b8b731fc0ecb1865b8e68a070084eb95d735d04f5b6c851db2daf3048ab"},
+ {file = "cytoolz-1.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f546a96460a7e28eb2ec439f4664fa646c9b3e51c6ebad9a59d3922bbe65e30"},
+ {file = "cytoolz-1.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0317681dd065532d21836f860b0563b199ee716f55d0c1f10de3ce7100c78a3b"},
+ {file = "cytoolz-1.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c0ef52febd5a7821a3fd8d10f21d460d1a3d2992f724ba9c91fbd7a96745d41"},
+ {file = "cytoolz-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebaf419acf2de73b643cf96108702b8aef8e825cf4f63209ceb078d5fbbbfd"},
+ {file = "cytoolz-1.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f7f04eeb4088947585c92d6185a618b25ad4a0f8f66ea30c8db83cf94a425e3"},
+ {file = "cytoolz-1.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f61928803bb501c17914b82d457c6f50fe838b173fb40d39c38d5961185bd6c7"},
+ {file = "cytoolz-1.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d2960cb4fa01ccb985ad1280db41f90dc97a80b397af970a15d5a5de403c8c61"},
+ {file = "cytoolz-1.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b2b407cc3e9defa8df5eb46644f6f136586f70ba49eba96f43de67b9a0984fd3"},
+ {file = "cytoolz-1.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8245f929144d4d3bd7b972c9593300195c6cea246b81b4c46053c48b3f044580"},
+ {file = "cytoolz-1.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e37385db03af65763933befe89fa70faf25301effc3b0485fec1c15d4ce4f052"},
+ {file = "cytoolz-1.0.1-cp311-cp311-win32.whl", hash = "sha256:50f9c530f83e3e574fc95c264c3350adde8145f4f8fc8099f65f00cc595e5ead"},
+ {file = "cytoolz-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:b7f6b617454b4326af7bd3c7c49b0fc80767f134eb9fd6449917a058d17a0e3c"},
+ {file = "cytoolz-1.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fcb8f7d0d65db1269022e7e0428471edee8c937bc288ebdcb72f13eaa67c2fe4"},
+ {file = "cytoolz-1.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:207d4e4b445e087e65556196ff472ff134370d9a275d591724142e255f384662"},
+ {file = "cytoolz-1.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21cdf6bac6fd843f3b20280a66fd8df20dea4c58eb7214a2cd8957ec176f0bb3"},
+ {file = "cytoolz-1.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a55ec098036c0dea9f3bdc021f8acd9d105a945227d0811589f0573f21c9ce1"},
+ {file = "cytoolz-1.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a13ab79ff4ce202e03ab646a2134696988b554b6dc4b71451e948403db1331d8"},
+ {file = "cytoolz-1.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2d944799026e1ff08a83241f1027a2d9276c41f7a74224cd98b7df6e03957d"},
+ {file = "cytoolz-1.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88ba85834cd523b91fdf10325e1e6d71c798de36ea9bdc187ca7bd146420de6f"},
+ {file = "cytoolz-1.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a750b1af7e8bf6727f588940b690d69e25dc47cce5ce467925a76561317eaf7"},
+ {file = "cytoolz-1.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44a71870f7eae31d263d08b87da7c2bf1176f78892ed8bdade2c2850478cb126"},
+ {file = "cytoolz-1.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8231b9abbd8e368e036f4cc2e16902c9482d4cf9e02a6147ed0e9a3cd4a9ab0"},
+ {file = "cytoolz-1.0.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:aa87599ccc755de5a096a4d6c34984de6cd9dc928a0c5eaa7607457317aeaf9b"},
+ {file = "cytoolz-1.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67cd16537df51baabde3baa770ab7b8d16839c4d21219d5b96ac59fb012ebd2d"},
+ {file = "cytoolz-1.0.1-cp312-cp312-win32.whl", hash = "sha256:fb988c333f05ee30ad4693fe4da55d95ec0bb05775d2b60191236493ea2e01f9"},
+ {file = "cytoolz-1.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:8f89c48d8e5aec55ffd566a8ec858706d70ed0c6a50228eca30986bfa5b4da8b"},
+ {file = "cytoolz-1.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6944bb93b287032a4c5ca6879b69bcd07df46f3079cf8393958cf0b0454f50c0"},
+ {file = "cytoolz-1.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e027260fd2fc5cb041277158ac294fc13dca640714527219f702fb459a59823a"},
+ {file = "cytoolz-1.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88662c0e07250d26f5af9bc95911e6137e124a5c1ec2ce4a5d74de96718ab242"},
+ {file = "cytoolz-1.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309dffa78b0961b4c0cf55674b828fbbc793cf2d816277a5c8293c0c16155296"},
+ {file = "cytoolz-1.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:edb34246e6eb40343c5860fc51b24937698e4fa1ee415917a73ad772a9a1746b"},
+ {file = "cytoolz-1.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a54da7a8e4348a18d45d4d5bc84af6c716d7f131113a4f1cc45569d37edff1b"},
+ {file = "cytoolz-1.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:241c679c3b1913c0f7259cf1d9639bed5084c86d0051641d537a0980548aa266"},
+ {file = "cytoolz-1.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5bfc860251a8f280ac79696fc3343cfc3a7c30b94199e0240b6c9e5b6b01a2a5"},
+ {file = "cytoolz-1.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8edd1547014050c1bdad3ff85d25c82bd1c2a3c96830c6181521eb78b9a42b3"},
+ {file = "cytoolz-1.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b349bf6162e8de215403d7f35f8a9b4b1853dc2a48e6e1a609a5b1a16868b296"},
+ {file = "cytoolz-1.0.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1b18b35256219b6c3dd0fa037741b85d0bea39c552eab0775816e85a52834140"},
+ {file = "cytoolz-1.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:738b2350f340ff8af883eb301054eb724997f795d20d90daec7911c389d61581"},
+ {file = "cytoolz-1.0.1-cp313-cp313-win32.whl", hash = "sha256:9cbd9c103df54fcca42be55ef40e7baea624ac30ee0b8bf1149f21146d1078d9"},
+ {file = "cytoolz-1.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:90e577e08d3a4308186d9e1ec06876d4756b1e8164b92971c69739ea17e15297"},
+ {file = "cytoolz-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3a509e4ac8e711703c368476b9bbce921fcef6ebb87fa3501525f7000e44185"},
+ {file = "cytoolz-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a7eecab6373e933dfbf4fdc0601d8fd7614f8de76793912a103b5fccf98170cd"},
+ {file = "cytoolz-1.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e55ed62087f6e3e30917b5f55350c3b6be6470b849c6566018419cd159d2cebc"},
+ {file = "cytoolz-1.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43de33d99a4ccc07234cecd81f385456b55b0ea9c39c9eebf42f024c313728a5"},
+ {file = "cytoolz-1.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:139bed875828e1727018aa0982aa140e055cbafccb7fd89faf45cbb4f2a21514"},
+ {file = "cytoolz-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22c12671194b518aa8ce2f4422bd5064f25ab57f410ba0b78705d0a219f4a97a"},
+ {file = "cytoolz-1.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79888f2f7dc25709cd5d37b032a8833741e6a3692c8823be181d542b5999128e"},
+ {file = "cytoolz-1.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:51628b4eb41fa25bd428f8f7b5b74fbb05f3ae65fbd265019a0dd1ded4fdf12a"},
+ {file = "cytoolz-1.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:1db9eb7179285403d2fb56ba1ff6ec35a44921b5e2fa5ca19d69f3f9f0285ea5"},
+ {file = "cytoolz-1.0.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:08ab7efae08e55812340bfd1b3f09f63848fe291675e2105eab1aa5327d3a16e"},
+ {file = "cytoolz-1.0.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e5fdc5264f884e7c0a1711a81dff112708a64b9c8561654ee578bfdccec6be09"},
+ {file = "cytoolz-1.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:90d6a2e6ab891043ee655ec99d5e77455a9bee9e1131bdfcfb745edde81200dd"},
+ {file = "cytoolz-1.0.1-cp38-cp38-win32.whl", hash = "sha256:08946e083faa5147751b34fbf78ab931f149ef758af5c1092932b459e18dcf5c"},
+ {file = "cytoolz-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:a91b4e10a9c03796c0dc93e47ebe25bb41ecc6fafc3cf5197c603cf767a3d44d"},
+ {file = "cytoolz-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:980c323e626ba298b77ae62871b2de7c50b9d7219e2ddf706f52dd34b8be7349"},
+ {file = "cytoolz-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:45f6fa1b512bc2a0f2de5123db932df06c7f69d12874fe06d67772b2828e2c8b"},
+ {file = "cytoolz-1.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93f42d9100c415155ad1f71b0de362541afd4ac95e3153467c4c79972521b6b"},
+ {file = "cytoolz-1.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a76d20dec9c090cdf4746255bbf06a762e8cc29b5c9c1d138c380bbdb3122ade"},
+ {file = "cytoolz-1.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:239039585487c69aa50c5b78f6a422016297e9dea39755761202fb9f0530fe87"},
+ {file = "cytoolz-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28307640ca2ab57b9fbf0a834b9bf563958cd9e038378c3a559f45f13c3c541"},
+ {file = "cytoolz-1.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:454880477bb901cee3a60f6324ec48c95d45acc7fecbaa9d49a5af737ded0595"},
+ {file = "cytoolz-1.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:902115d1b1f360fd81e44def30ac309b8641661150fcbdde18ead446982ada6a"},
+ {file = "cytoolz-1.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e68e6b38473a3a79cee431baa22be31cac39f7df1bf23eaa737eaff42e213883"},
+ {file = "cytoolz-1.0.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:32fba3f63fcb76095b0a22f4bdcc22bc62a2bd2d28d58bf02fd21754c155a3ec"},
+ {file = "cytoolz-1.0.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0724ba4cf41eb40b6cf75250820ab069e44bdf4183ff78857aaf4f0061551075"},
+ {file = "cytoolz-1.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c42420e0686f887040d5230420ed44f0e960ccbfa29a0d65a3acd9ca52459209"},
+ {file = "cytoolz-1.0.1-cp39-cp39-win32.whl", hash = "sha256:4ba8b16358ea56b1fe8e637ec421e36580866f2e787910bac1cf0a6997424a34"},
+ {file = "cytoolz-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:92d27f84bf44586853d9562bfa3610ecec000149d030f793b4cb614fd9da1813"},
+ {file = "cytoolz-1.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:83d19d55738ad9c60763b94f3f6d3c6e4de979aeb8d76841c1401081e0e58d96"},
+ {file = "cytoolz-1.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f112a71fad6ea824578e6393765ce5c054603afe1471a5c753ff6c67fd872d10"},
+ {file = "cytoolz-1.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a515df8f8aa6e1eaaf397761a6e4aff2eef73b5f920aedf271416d5471ae5ee"},
+ {file = "cytoolz-1.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92c398e7b7023460bea2edffe5fcd0a76029580f06c3f6938ac3d198b47156f3"},
+ {file = "cytoolz-1.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3237e56211e03b13df47435b2369f5df281e02b04ad80a948ebd199b7bc10a47"},
+ {file = "cytoolz-1.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba0d1da50aab1909b165f615ba1125c8b01fcc30d606c42a61c42ea0269b5e2c"},
+ {file = "cytoolz-1.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25b6e8dec29aa5a390092d193abd673e027d2c0b50774ae816a31454286c45c7"},
+ {file = "cytoolz-1.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36cd6989ebb2f18fe9af8f13e3c61064b9f741a40d83dc5afeb0322338ad25f2"},
+ {file = "cytoolz-1.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47394f8ab7fca3201f40de61fdeea20a2baffb101485ae14901ea89c3f6c95d"},
+ {file = "cytoolz-1.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d00ac423542af944302e034e618fb055a0c4e87ba704cd6a79eacfa6ac83a3c9"},
+ {file = "cytoolz-1.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a5ca923d1fa632f7a4fb33c0766c6fba7f87141a055c305c3e47e256fb99c413"},
+ {file = "cytoolz-1.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:058bf996bcae9aad3acaeeb937d42e0c77c081081e67e24e9578a6a353cb7fb2"},
+ {file = "cytoolz-1.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69e2a1f41a3dad94a17aef4a5cc003323359b9f0a9d63d4cc867cb5690a2551d"},
+ {file = "cytoolz-1.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67daeeeadb012ec2b59d63cb29c4f2a2023b0c4957c3342d354b8bb44b209e9a"},
+ {file = "cytoolz-1.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:54d3d36bbf0d4344d1afa22c58725d1668e30ff9de3a8f56b03db1a6da0acb11"},
+ {file = "cytoolz-1.0.1.tar.gz", hash = "sha256:89cc3161b89e1bb3ed7636f74ed2e55984fd35516904fc878cae216e42b2c7d6"},
]
[package.dependencies]
@@ -852,20 +864,20 @@ yaml = ["PyYAML (>=5.3)"]
[[package]]
name = "deprecated"
-version = "1.2.15"
+version = "1.2.18"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
files = [
- {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"},
- {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"},
+ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"},
+ {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"},
]
[package.dependencies]
wrapt = ">=1.10,<2"
[package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"]
[[package]]
name = "dill"
@@ -895,13 +907,13 @@ files = [
[[package]]
name = "eth-abi"
-version = "5.1.0"
+version = "5.2.0"
description = "eth_abi: Python utilities for working with Ethereum ABI definitions, especially encoding and decoding"
optional = false
python-versions = "<4,>=3.8"
files = [
- {file = "eth_abi-5.1.0-py3-none-any.whl", hash = "sha256:84cac2626a7db8b7d9ebe62b0fdca676ab1014cc7f777189e3c0cd721a4c16d8"},
- {file = "eth_abi-5.1.0.tar.gz", hash = "sha256:33ddd756206e90f7ddff1330cc8cac4aa411a824fe779314a0a52abea2c8fc14"},
+ {file = "eth_abi-5.2.0-py3-none-any.whl", hash = "sha256:17abe47560ad753f18054f5b3089fcb588f3e3a092136a416b6c1502cb7e8877"},
+ {file = "eth_abi-5.2.0.tar.gz", hash = "sha256:178703fa98c07d8eecd5ae569e7e8d159e493ebb6eeb534a8fe973fbc4e40ef0"},
]
[package.dependencies]
@@ -910,10 +922,10 @@ eth-utils = ">=2.0.0"
parsimonious = ">=0.10.0,<0.11.0"
[package.extras]
-dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "hypothesis (>=4.18.2,<5.0.0)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-pythonpath (>=0.7.1)", "pytest-timeout (>=2.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"]
-docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"]
-test = ["eth-hash[pycryptodome]", "hypothesis (>=4.18.2,<5.0.0)", "pytest (>=7.0.0)", "pytest-pythonpath (>=0.7.1)", "pytest-timeout (>=2.0.0)", "pytest-xdist (>=2.4.0)"]
-tools = ["hypothesis (>=4.18.2,<5.0.0)"]
+dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "eth-hash[pycryptodome]", "hypothesis (>=6.22.0,<6.108.7)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-pythonpath (>=0.7.1)", "pytest-timeout (>=2.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"]
+docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"]
+test = ["eth-hash[pycryptodome]", "hypothesis (>=6.22.0,<6.108.7)", "pytest (>=7.0.0)", "pytest-pythonpath (>=0.7.1)", "pytest-timeout (>=2.0.0)", "pytest-xdist (>=2.4.0)"]
+tools = ["hypothesis (>=6.22.0,<6.108.7)"]
[[package]]
name = "eth-account"
@@ -944,55 +956,56 @@ test = ["coverage", "hypothesis (>=4.18.0,<5)", "pytest (>=7.0.0)", "pytest-xdis
[[package]]
name = "eth-hash"
-version = "0.7.0"
+version = "0.7.1"
description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3"
optional = false
-python-versions = ">=3.8, <4"
+python-versions = "<4,>=3.8"
files = [
- {file = "eth-hash-0.7.0.tar.gz", hash = "sha256:bacdc705bfd85dadd055ecd35fd1b4f846b671add101427e089a4ca2e8db310a"},
- {file = "eth_hash-0.7.0-py3-none-any.whl", hash = "sha256:b8d5a230a2b251f4a291e3164a23a14057c4a6de4b0aa4a16fa4dc9161b57e2f"},
+ {file = "eth_hash-0.7.1-py3-none-any.whl", hash = "sha256:0fb1add2adf99ef28883fd6228eb447ef519ea72933535ad1a0b28c6f65f868a"},
+ {file = "eth_hash-0.7.1.tar.gz", hash = "sha256:d2411a403a0b0a62e8247b4117932d900ffb4c8c64b15f92620547ca5ce46be5"},
]
[package.dependencies]
pycryptodome = {version = ">=3.6.6,<4", optional = true, markers = "extra == \"pycryptodome\""}
[package.extras]
-dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"]
-docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"]
+dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"]
+docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"]
pycryptodome = ["pycryptodome (>=3.6.6,<4)"]
pysha3 = ["pysha3 (>=1.0.0,<2.0.0)", "safe-pysha3 (>=1.0.0)"]
test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
[[package]]
name = "eth-keyfile"
-version = "0.8.1"
+version = "0.9.1"
description = "eth-keyfile: A library for handling the encrypted keyfiles used to store ethereum private keys"
optional = false
python-versions = "<4,>=3.8"
files = [
- {file = "eth_keyfile-0.8.1-py3-none-any.whl", hash = "sha256:65387378b82fe7e86d7cb9f8d98e6d639142661b2f6f490629da09fddbef6d64"},
- {file = "eth_keyfile-0.8.1.tar.gz", hash = "sha256:9708bc31f386b52cca0969238ff35b1ac72bd7a7186f2a84b86110d3c973bec1"},
+ {file = "eth_keyfile-0.9.1-py3-none-any.whl", hash = "sha256:9789c3b4fa0bb6e2616cdc2bdd71b8755b42947d78ef1e900a0149480fabb5c2"},
+ {file = "eth_keyfile-0.9.1.tar.gz", hash = "sha256:c7a8bc6af4527d1ab2eb1d1b949d59925252e17663eaf90087da121327b51df6"},
]
[package.dependencies]
eth-keys = ">=0.4.0"
eth-utils = ">=2"
+py_ecc = ">=5.2.0"
pycryptodome = ">=3.6.6,<4"
[package.extras]
-dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"]
-docs = ["towncrier (>=21,<22)"]
+dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"]
+docs = ["towncrier (>=24,<25)"]
test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
[[package]]
name = "eth-keys"
-version = "0.6.0"
+version = "0.6.1"
description = "eth-keys: Common API for Ethereum key operations"
optional = false
python-versions = "<4,>=3.8"
files = [
- {file = "eth_keys-0.6.0-py3-none-any.whl", hash = "sha256:b396fdfe048a5bba3ef3990739aec64901eb99901c03921caa774be668b1db6e"},
- {file = "eth_keys-0.6.0.tar.gz", hash = "sha256:ba33230f851d02c894e83989185b21d76152c49b37e35b61b1d8a6d9f1d20430"},
+ {file = "eth_keys-0.6.1-py3-none-any.whl", hash = "sha256:7deae4cd56e862e099ec58b78176232b931c4ea5ecded2f50c7b1ccbc10c24cf"},
+ {file = "eth_keys-0.6.1.tar.gz", hash = "sha256:a43e263cbcabfd62fa769168efc6c27b1f5603040e4de22bb84d12567e4fd962"},
]
[package.dependencies]
@@ -1001,8 +1014,8 @@ eth-utils = ">=2"
[package.extras]
coincurve = ["coincurve (>=12.0.0)"]
-dev = ["asn1tools (>=0.146.2)", "build (>=0.9.0)", "bumpversion (>=0.5.3)", "coincurve (>=12.0.0)", "eth-hash[pysha3]", "factory-boy (>=3.0.1)", "hypothesis (>=5.10.3)", "ipython", "pre-commit (>=3.4.0)", "pyasn1 (>=0.4.5)", "pytest (>=7.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"]
-docs = ["towncrier (>=21,<22)"]
+dev = ["asn1tools (>=0.146.2)", "build (>=0.9.0)", "bump_my_version (>=0.19.0)", "coincurve (>=12.0.0)", "eth-hash[pysha3]", "factory-boy (>=3.0.1)", "hypothesis (>=5.10.3)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pyasn1 (>=0.4.5)", "pytest (>=7.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"]
+docs = ["towncrier (>=24,<25)"]
test = ["asn1tools (>=0.146.2)", "eth-hash[pysha3]", "factory-boy (>=3.0.1)", "hypothesis (>=5.10.3)", "pyasn1 (>=0.4.5)", "pytest (>=7.0.0)"]
[[package]]
@@ -1545,13 +1558,13 @@ files = [
[[package]]
name = "jinja2"
-version = "3.1.4"
+version = "3.1.5"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
- {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
- {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
+ {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
]
[package.dependencies]
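
Worth flagging among the routine bumps: Jinja2 3.1.4 -> 3.1.5 is a security release (sandbox-hardening fixes, per the 3.1.5 release notes), so this upgrade is more than housekeeping. The public API is unchanged, making it drop-in:

    # Jinja2 usage is identical across 3.1.4 -> 3.1.5; only sandbox hardening changed.
    from jinja2 import Template

    print(Template("indexed {{ n }} blocks").render(n=100))
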
@@ -1598,6 +1611,20 @@ files = [
importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
referencing = ">=0.31.0"
+[[package]]
+name = "kafka-python"
+version = "2.0.2"
+description = "Pure Python client for Apache Kafka"
+optional = false
+python-versions = "*"
+files = [
+ {file = "kafka-python-2.0.2.tar.gz", hash = "sha256:04dfe7fea2b63726cd6f3e79a2d86e709d608d74406638c5da33a01d45a9d7e3"},
+ {file = "kafka_python-2.0.2-py2.py3-none-any.whl", hash = "sha256:2d92418c7cb1c298fa6c7f0fb3519b520d0d7526ac6cb7ae2a4fc65a51a94b6e"},
+]
+
+[package.extras]
+crc32c = ["crc32c"]
+
[[package]]
name = "limits"
version = "3.13.0"
@@ -1723,13 +1750,13 @@ test = ["pytest"]
[[package]]
name = "mako"
-version = "1.3.7"
+version = "1.3.9"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
optional = false
python-versions = ">=3.8"
files = [
- {file = "Mako-1.3.7-py3-none-any.whl", hash = "sha256:d18f990ad57f800ce8e76cbfb0b74afe471c293517e9f5003ace6dad5aa72c36"},
- {file = "mako-1.3.7.tar.gz", hash = "sha256:20405b1232e0759f0e7d87b01f6bb94fce0761747f1cb876ecf90bd512d0b639"},
+ {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"},
+ {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"},
]
[package.dependencies]
@@ -2407,6 +2434,20 @@ mmh3 = "*"
redis = ">=4,<5"
typing-extensions = "*"
+[[package]]
+name = "prometheus-client"
+version = "0.21.1"
+description = "Python client for the Prometheus monitoring system."
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"},
+ {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"},
+]
+
+[package.extras]
+twisted = ["twisted"]
+
[[package]]
name = "propcache"
version = "0.2.0"
@@ -2516,22 +2557,22 @@ files = [
[[package]]
name = "protobuf"
-version = "5.29.1"
+version = "5.29.3"
description = ""
optional = false
python-versions = ">=3.8"
files = [
- {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"},
- {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"},
- {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"},
- {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"},
- {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"},
- {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = "sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"},
- {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"},
- {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"},
- {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"},
- {file = "protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"},
- {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"},
+ {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"},
+ {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"},
+ {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"},
+ {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"},
+ {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"},
+ {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"},
+ {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"},
+ {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"},
+ {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"},
+ {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"},
+ {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"},
]
[[package]]
@@ -2615,6 +2656,27 @@ files = [
{file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"},
]
+[[package]]
+name = "py-ecc"
+version = "7.0.1"
+description = "py-ecc: Elliptic curve crypto in python including secp256k1, alt_bn128, and bls12_381"
+optional = false
+python-versions = "<4,>=3.8"
+files = [
+ {file = "py_ecc-7.0.1-py3-none-any.whl", hash = "sha256:84a8b4d436163c83c65345a68e32f921ef6e64374a36f8e561f0455b4b08f5f2"},
+ {file = "py_ecc-7.0.1.tar.gz", hash = "sha256:557461f42e57294d734305a30faf6b8903421651871e9cdeff8d8e67c6796c70"},
+]
+
+[package.dependencies]
+cached-property = ">=1.5.1"
+eth-typing = ">=3.0.0"
+eth-utils = ">=2.0.0"
+
+[package.extras]
+dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"]
+docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"]
+test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
+
[[package]]
name = "pycodestyle"
version = "2.7.0"
@@ -2680,13 +2742,13 @@ files = [
[[package]]
name = "pygments"
-version = "2.18.0"
+version = "2.19.1"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
files = [
- {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
- {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
+ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"},
+ {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"},
]
[package.extras]
@@ -2748,13 +2810,13 @@ six = ">=1.5"
[[package]]
name = "pytz"
-version = "2024.2"
+version = "2025.1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
- {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
- {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
+ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"},
+ {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"},
]
[[package]]
@@ -3035,23 +3097,23 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rlp"
-version = "4.0.1"
+version = "4.1.0"
description = "rlp: A package for Recursive Length Prefix encoding and decoding"
optional = false
python-versions = "<4,>=3.8"
files = [
- {file = "rlp-4.0.1-py3-none-any.whl", hash = "sha256:ff6846c3c27b97ee0492373aa074a7c3046aadd973320f4fffa7ac45564b0258"},
- {file = "rlp-4.0.1.tar.gz", hash = "sha256:bcefb11013dfadf8902642337923bd0c786dc8a27cb4c21da6e154e52869ecb1"},
+ {file = "rlp-4.1.0-py3-none-any.whl", hash = "sha256:8eca394c579bad34ee0b937aecb96a57052ff3716e19c7a578883e767bc5da6f"},
+ {file = "rlp-4.1.0.tar.gz", hash = "sha256:be07564270a96f3e225e2c107db263de96b5bc1f27722d2855bd3459a08e95a9"},
]
[package.dependencies]
eth-utils = ">=2"
[package.extras]
-dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "hypothesis (==5.19.0)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"]
-docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"]
+dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "hypothesis (>=6.22.0,<6.108.7)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"]
+docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"]
rust-backend = ["rusty-rlp (>=0.2.1)"]
-test = ["hypothesis (==5.19.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
+test = ["hypothesis (>=6.22.0,<6.108.7)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
[[package]]
name = "rpds-py"
@@ -3201,6 +3263,17 @@ files = [
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
]
+[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+ {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
+
[[package]]
name = "sqlalchemy"
version = "2.0.31"
@@ -3302,6 +3375,21 @@ files = [
[package.extras]
widechars = ["wcwidth"]
+[[package]]
+name = "tenacity"
+version = "9.0.0"
+description = "Retry code until it succeeds"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
+ {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
+]
+
+[package.extras]
+doc = ["reno", "sphinx"]
+test = ["pytest", "tornado (>=4.5)", "typeguard"]
+
[[package]]
name = "tomli"
version = "1.2.3"
@@ -3522,76 +3610,90 @@ watchdog = ["watchdog (>=2.3)"]
[[package]]
name = "wrapt"
-version = "1.17.0"
+version = "1.17.2"
description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.8"
files = [
- {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"},
- {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"},
- {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"},
- {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"},
- {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"},
- {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"},
- {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"},
- {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"},
- {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"},
- {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"},
- {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"},
- {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"},
- {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"},
- {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"},
- {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"},
- {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"},
- {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"},
- {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"},
- {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"},
- {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"},
- {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"},
- {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"},
- {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"},
- {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"},
- {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"},
- {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"},
- {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"},
- {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"},
- {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"},
- {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"},
- {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"},
- {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"},
- {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"},
- {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"},
- {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"},
- {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"},
- {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"},
- {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"},
- {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"},
- {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"},
- {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"},
- {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"},
- {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"},
- {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"},
- {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"},
- {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"},
- {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"},
- {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"},
- {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"},
- {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"},
- {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"},
- {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"},
- {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"},
- {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"},
- {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"},
- {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"},
- {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"},
- {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"},
- {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"},
- {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"},
- {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"},
- {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"},
- {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"},
- {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"},
- {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"},
+ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"},
+ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"},
+ {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"},
+ {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"},
+ {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"},
+ {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"},
+ {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"},
+ {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"},
+ {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"},
+ {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"},
+ {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"},
+ {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"},
+ {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"},
+ {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"},
+ {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"},
+ {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"},
+ {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"},
+ {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"},
+ {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"},
+ {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"},
+ {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"},
+ {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"},
+ {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"},
+ {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"},
+ {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"},
+ {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"},
+ {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"},
+ {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"},
+ {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"},
+ {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"},
+ {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"},
+ {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"},
+ {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"},
+ {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"},
+ {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"},
+ {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"},
+ {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"},
+ {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"},
+ {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"},
+ {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"},
+ {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"},
+ {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"},
+ {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"},
+ {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"},
+ {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"},
+ {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"},
+ {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"},
+ {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"},
+ {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"},
+ {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"},
+ {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"},
+ {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"},
+ {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"},
+ {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"},
+ {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"},
+ {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"},
+ {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"},
+ {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"},
+ {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"},
+ {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"},
+ {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"},
+ {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"},
+ {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"},
+ {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"},
+ {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"},
+ {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"},
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"},
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"},
+ {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"},
+ {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"},
+ {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"},
+ {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"},
+ {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"},
+ {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"},
+ {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"},
+ {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"},
+ {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"},
+ {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"},
+ {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"},
]
[[package]]
@@ -3728,4 +3830,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.8,<4"
-content-hash = "bd6df379fd69a965b04fd04705489792995ebce429755a90bca7c3bf97650db0"
+content-hash = "2c5be8c990e6614f5c9c3cc0c87bbfba452e126701f6788bf1caeef5205775a2"
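
The final lock hunk is mechanical: `content-hash` is Poetry's fingerprint of the dependency declarations in pyproject.toml, so it necessarily changes with the dependency edits below and is regenerated by `poetry lock`.
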
diff --git a/pyproject.toml b/pyproject.toml
index ce74bbfbd..fd7ab2168 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,21 +1,20 @@
[build-system]
requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry_dynamic_versioning.backend"
+build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "hemera"
description = "Tools for exporting Ethereum blockchain data to JSON/CSV file and postgresql"
-version = "0.6.0"
+version = "1.0.0a1"
authors = [
"xuzh ",
- "shanshuo0918 "
+ "shanshuo0918 ",
]
readme = "README.md"
license = "Apache-2.0"
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
- "License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
@@ -24,11 +23,8 @@ classifiers = [
]
keywords = ["ethereum", "indexer", "explorer", "hemera"]
packages = [
- { include = "cli" },
- { include = "common" },
- { include = "api" },
- { include = "indexer" },
- { include = "enumeration" },
+ { include = "hemera", from = "." },
+ { include = "hemera_udf", from = "." }
]
[tool.poetry.dependencies]
@@ -64,6 +60,10 @@ dill = "0.3.9"
multiprocess = "0.70.17"
PyYAML = "6.0.2"
numpy = "1.24.4"
+tenacity = "9.0.0"
+kafka-python = "2.0.2"
+prometheus_client = "0.21.1"
+sortedcontainers = "2.4.0"
[tool.poetry.group.dev.dependencies]
pytest = ">=7.0.0"
@@ -74,7 +74,7 @@ ruff = "^0.0.235"
pytest-cov = "*"
[tool.poetry.scripts]
-hemera = "cli:cli"
+hemera = "hemera.cli:cli"
[tool.poetry.urls]
"Homepage" = "https://github.com/HemeraProtocol/hemera-indexer"
@@ -101,11 +101,13 @@ markers = [
"indexer_bridge: Tests related to the indexer bridge",
"indexer_bridge_optimism: Tests related to the indexer bridge optimism",
"indexer_bridge_arbitrum: Tests related to the indexer bridge arbitrum",
+ "indexer_bridge_morph: Tests related to the indexer bridge morph",
"indexer_jobs: Tests related to the indexer jobs",
"indexer_jobs_user_ops: Tests related to the indexer jobs user ops",
"indexer_address_index: Tests related to the indexer address index",
"explorer_api: Test explorer related API",
"indexer_jobs_day_mining: Tests related to the indexer day mining",
"ens: Tests related to the indexer ENS",
- "pipeline: Continuous Integration"
-]
\ No newline at end of file
+ "pipeline: Continuous Integration",
+ "multicall_helper: Tests related to the multicall helper",
+]
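
Two markers join the registry here (`indexer_bridge_morph`, `multicall_helper`), and the file now ends with a newline. The pyproject list is what tells pytest which markers are legitimate; a test opts in by decoration and a run selects by expression:

    # Declaring and selecting a registered marker.
    import pytest

    @pytest.mark.multicall_helper
    def test_aggregate_calls():
        assert True

    # selected with:  poetry run pytest -vv -m multicall_helper
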
diff --git a/indexer/tests/jobs/__init__.py b/tests/__init__.py
similarity index 100%
rename from indexer/tests/jobs/__init__.py
rename to tests/__init__.py
diff --git a/indexer/tests/user_ops/__init__.py b/tests/hemera/__init__.py
similarity index 100%
rename from indexer/tests/user_ops/__init__.py
rename to tests/hemera/__init__.py
diff --git a/indexer/tests/utils/__init__.py b/tests/hemera/common/__init__.py
similarity index 100%
rename from indexer/tests/utils/__init__.py
rename to tests/hemera/common/__init__.py
diff --git a/tests/hemera/common/enumeration/__init__.py b/tests/hemera/common/enumeration/__init__.py
new file mode 100644
index 000000000..b58ac2e19
--- /dev/null
+++ b/tests/hemera/common/enumeration/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/11 12:21
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/tests/hemera/common/enumeration/test_entity_type.py b/tests/hemera/common/enumeration/test_entity_type.py
new file mode 100644
index 000000000..7795b9d2d
--- /dev/null
+++ b/tests/hemera/common/enumeration/test_entity_type.py
@@ -0,0 +1,130 @@
+import pytest
+
+from hemera.common.enumeration.entity_type import (
+ DynamicEntityTypeRegistry,
+ EntityType,
+ StaticOutputTypes,
+ calculate_entity_value,
+ generate_output_types,
+)
+from hemera.indexer.domains.block import Block
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token import Token
+from hemera.indexer.domains.trace import Trace
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_entity_type_basic():
+ """Test basic EntityType definitions"""
+ assert EntityType.EXPLORER_BASE == 1 << 0
+ assert EntityType.EXPLORER_TOKEN == 1 << 1
+ assert EntityType.EXPLORER_TRACE == 1 << 2
+ assert EntityType.EXPLORER == EntityType.EXPLORER_BASE | EntityType.EXPLORER_TOKEN | EntityType.EXPLORER_TRACE
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_dynamic_entity_type_registration():
+ """Test registering new dynamic entity types"""
+ # Reset registry state
+ DynamicEntityTypeRegistry._next_bit = 14
+ DynamicEntityTypeRegistry._dynamic_types = {}
+
+ test_type = DynamicEntityTypeRegistry.register("TEST_TYPE")
+ assert test_type == 1 << 14
+
+ # Test duplicate registration returns same value
+ duplicate_type = DynamicEntityTypeRegistry.register("TEST_TYPE")
+ assert duplicate_type == test_type
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_output_type_registration():
+ """Test registration of output types for both static and dynamic types"""
+ # Reset registries
+ StaticOutputTypes._output_types = {}
+ DynamicEntityTypeRegistry._output_types = {}
+
+ # Register static output types
+ base_types = {Block, Log}
+ StaticOutputTypes.register_output_types(EntityType.EXPLORER_BASE, base_types)
+
+ # Register dynamic output types
+ test_type = DynamicEntityTypeRegistry.register("TEST_TYPE")
+ test_types = {Token, Trace}
+ DynamicEntityTypeRegistry.register_output_types(test_type, test_types)
+
+ # Test retrieval through generate_output_types
+ combined_types = EntityType.EXPLORER_BASE | test_type
+ retrieved_types = set(generate_output_types(combined_types))
+ assert retrieved_types == base_types | test_types
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_calculate_entity_value_valid():
+ """Test calculate_entity_value with valid inputs"""
+ # Test single type
+ value = calculate_entity_value("EXPLORER_BASE")
+ assert value == EntityType.EXPLORER_BASE
+
+ # Test multiple types
+ value = calculate_entity_value("EXPLORER_BASE,EXPLORER_TOKEN")
+ assert value == EntityType.EXPLORER_BASE | EntityType.EXPLORER_TOKEN
+
+ # Test with dynamic type
+ test_type = DynamicEntityTypeRegistry.register("TEST_TYPE")
+ value = calculate_entity_value("EXPLORER_BASE,TEST_TYPE")
+ assert value == EntityType.EXPLORER_BASE | test_type
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_calculate_entity_value_invalid():
+ """Test calculate_entity_value with invalid inputs"""
+ # Test empty input
+ assert calculate_entity_value("") == 0
+ assert calculate_entity_value(None) == 0
+
+ # Test invalid type
+ with pytest.raises(ValueError) as excinfo:
+ calculate_entity_value("INVALID_TYPE")
+ assert "is not an available entity type" in str(excinfo.value)
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_combine_all_entity_types():
+ """Test combining all entity types including dynamic ones"""
+ # Register a dynamic type
+ test_type = DynamicEntityTypeRegistry.register("TEST_TYPE")
+
+ # Get combined value
+ all_types = EntityType.combine_all_entity_types()
+
+ # Verify includes both static and dynamic
+ assert all_types & EntityType.EXPLORER != 0
+ assert all_types & test_type != 0
+ assert all_types & (EntityType.EXPLORER_BASE | EntityType.EXPLORER_TOKEN | EntityType.EXPLORER_TRACE) != 0
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_utils
+def test_duplicate_output_type_handling():
+ """Test handling of duplicate output types across different entity types"""
+ # Reset registries
+ StaticOutputTypes._output_types = {}
+ DynamicEntityTypeRegistry._output_types = {}
+
+ # Register same type for different entities
+ common_type = {Block}
+ StaticOutputTypes.register_output_types(EntityType.EXPLORER_BASE, common_type)
+ StaticOutputTypes.register_output_types(EntityType.EXPLORER_TOKEN, common_type)
+
+ # Verify duplicates are removed
+ combined_types = EntityType.EXPLORER_BASE | EntityType.EXPLORER_TOKEN
+ retrieved_types = list(generate_output_types(combined_types))
+ assert len(retrieved_types) == 1
+ assert Block in retrieved_types
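
The new module pins down the bit-flag contract behind entity types: static flags occupy the low bits, dynamic registrations start at bit 14 and are idempotent per name, and output types are deduplicated when flags are combined. A minimal reimplementation of that contract, written only to make the tested semantics concrete; it is not the actual `hemera.common.enumeration.entity_type` code:

    # Illustrative sketch of the bit-flag registry contract exercised above.
    class StaticTypes:
        EXPLORER_BASE = 1 << 0
        EXPLORER_TOKEN = 1 << 1
        EXPLORER_TRACE = 1 << 2
        EXPLORER = EXPLORER_BASE | EXPLORER_TOKEN | EXPLORER_TRACE

    class Registry:
        _next_bit = 14                        # dynamic types sit above the static range
        _dynamic_types = {}
        _output_types = {}

        @classmethod
        def register(cls, name):
            if name in cls._dynamic_types:    # idempotent: same name, same bit
                return cls._dynamic_types[name]
            value = 1 << cls._next_bit
            cls._dynamic_types[name] = value
            cls._next_bit += 1
            return value

        @classmethod
        def register_output_types(cls, entity_value, output_types):
            cls._output_types.setdefault(entity_value, set()).update(output_types)

    def generate_output_types(entity_value):
        seen = set()
        for flag, types in Registry._output_types.items():
            if entity_value & flag:
                for t in types:
                    if t not in seen:         # duplicates across flags collapse
                        seen.add(t)
                        yield t

    assert Registry.register("TEST_TYPE") == 1 << 14
    assert Registry.register("TEST_TYPE") == 1 << 14       # duplicate returns same bit
    Registry.register_output_types(StaticTypes.EXPLORER_BASE, {"Block"})
    Registry.register_output_types(StaticTypes.EXPLORER_TOKEN, {"Block"})
    combined = StaticTypes.EXPLORER_BASE | StaticTypes.EXPLORER_TOKEN
    assert list(generate_output_types(combined)) == ["Block"]
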
diff --git a/tests/hemera/indexer/__init__.py b/tests/hemera/indexer/__init__.py
new file mode 100644
index 000000000..6b839a736
--- /dev/null
+++ b/tests/hemera/indexer/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/11 12:20
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/utils/__init__.py b/tests/hemera/indexer/domains/__init__.py
similarity index 100%
rename from indexer/utils/__init__.py
rename to tests/hemera/indexer/domains/__init__.py
diff --git a/indexer/tests/domain/test_token_transfers.py b/tests/hemera/indexer/domains/test_token_transfers.py
similarity index 98%
rename from indexer/tests/domain/test_token_transfers.py
rename to tests/hemera/indexer/domains/test_token_transfers.py
index 392b38741..345475853 100644
--- a/indexer/tests/domain/test_token_transfers.py
+++ b/tests/hemera/indexer/domains/test_token_transfers.py
@@ -1,8 +1,8 @@
import pytest
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.domain.log import Log
-from indexer.domain.token_transfer import extract_transfer_from_log
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.token_transfer import extract_transfer_from_log
@pytest.mark.indexer
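
The rename above sets the pattern every migrated test follows: `common.*` becomes `hemera.common.*`, and `indexer.domain.*` becomes `hemera.indexer.domains.*` (note the pluralized package). In import terms:

    # Import migration from the 0.6.x layout to 1.0.0a1, as applied in these renames.
    # old:
    #   from common.utils.web3_utils import ZERO_ADDRESS
    #   from indexer.domain.log import Log
    # new:
    from hemera.common.utils.web3_utils import ZERO_ADDRESS
    from hemera.indexer.domains.log import Log
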
diff --git a/scheduler/__init__.py b/tests/hemera/indexer/jobs/__init__.py
similarity index 100%
rename from scheduler/__init__.py
rename to tests/hemera/indexer/jobs/__init__.py
diff --git a/indexer/tests/jobs/test_export_blocks_job.py b/tests/hemera/indexer/jobs/test_export_blocks_job.py
similarity index 62%
rename from indexer/tests/jobs/test_export_blocks_job.py
rename to tests/hemera/indexer/jobs/test_export_blocks_job.py
index c3d2924a4..7d3ca4608 100644
--- a/indexer/tests/jobs/test_export_blocks_job.py
+++ b/tests/hemera/indexer/jobs/test_export_blocks_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.block import Block
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.block import Block
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
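
Besides the path updates, each job test drops its explicit `item_exporters=[ConsoleItemExporter()]` argument, implying `JobScheduler` now defaults its exporter (the new signature is not shown in this diff, and the `ConsoleItemExporter` import appears to be left behind unused in some files). The RPC endpoint constants also move from `indexer.tests` to a top-level `tests_commons` module. A plausible shape for that shared module; the env-var lookups and defaults are assumptions, only the constant names come from this diff:

    # tests_commons/__init__.py -- hypothetical sketch, not the actual module.
    import os

    ETHEREUM_PUBLIC_NODE_RPC_URL = os.getenv("ETHEREUM_PUBLIC_NODE_RPC_URL", "")
    ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL = os.getenv("ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL", "")
    LINEA_PUBLIC_NODE_RPC_URL = os.getenv("LINEA_PUBLIC_NODE_RPC_URL", "")
    MANTLE_PUBLIC_NODE_RPC_URL = os.getenv("MANTLE_PUBLIC_NODE_RPC_URL", "")
    MANTLE_PUBLIC_NODE_DEBUG_RPC_URL = os.getenv("MANTLE_PUBLIC_NODE_DEBUG_RPC_URL", "")
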
diff --git a/indexer/tests/jobs/test_export_coin_balances_job.py b/tests/hemera/indexer/jobs/test_export_coin_balances_job.py
similarity index 64%
rename from indexer/tests/jobs/test_export_coin_balances_job.py
rename to tests/hemera/indexer/jobs/test_export_coin_balances_job.py
index c15c60df5..4c716ae67 100644
--- a/indexer/tests/jobs/test_export_coin_balances_job.py
+++ b/tests/hemera/indexer/jobs/test_export_coin_balances_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.coin_balance import CoinBalance
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import LINEA_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.coin_balance import CoinBalance
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import LINEA_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_coin_balance_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(LINEA_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/indexer/tests/jobs/test_export_contracts_job.py b/tests/hemera/indexer/jobs/test_export_contracts_job.py
similarity index 64%
rename from indexer/tests/jobs/test_export_contracts_job.py
rename to tests/hemera/indexer/jobs/test_export_contracts_job.py
index c3454b114..8b1b2adf4 100644
--- a/indexer/tests/jobs/test_export_contracts_job.py
+++ b/tests/hemera/indexer/jobs/test_export_contracts_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.contract import Contract
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import LINEA_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.contract import Contract
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import LINEA_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_coin_balance_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(LINEA_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/indexer/tests/jobs/test_export_token_balances_and_holders.py b/tests/hemera/indexer/jobs/test_export_token_balances_and_holders.py
similarity index 98%
rename from indexer/tests/jobs/test_export_token_balances_and_holders.py
rename to tests/hemera/indexer/jobs/test_export_token_balances_and_holders.py
index 6ca87f70e..3144ff86d 100644
--- a/indexer/tests/jobs/test_export_token_balances_and_holders.py
+++ b/tests/hemera/indexer/jobs/test_export_token_balances_and_holders.py
@@ -1,12 +1,12 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.current_token_balance import CurrentTokenBalance
-from indexer.domain.token_balance import TokenBalance
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import LINEA_PUBLIC_NODE_RPC_URL, MANTLE_PUBLIC_NODE_DEBUG_RPC_URL, MANTLE_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.current_token_balance import CurrentTokenBalance
+from hemera.indexer.domains.token_balance import TokenBalance
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import LINEA_PUBLIC_NODE_RPC_URL, MANTLE_PUBLIC_NODE_DEBUG_RPC_URL, MANTLE_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -19,7 +19,6 @@ def test_export_current_token_balance_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(MANTLE_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -51,7 +50,6 @@ def test_export_current_token_balance_job_mul():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(MANTLE_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -83,7 +81,6 @@ def test_export_token_balance_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(LINEA_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -570,7 +567,6 @@ def test_export_token_balance_job_mul():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(LINEA_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/indexer/tests/jobs/test_export_token_transfers_job.py b/tests/hemera/indexer/jobs/test_export_token_transfers_job.py
similarity index 85%
rename from indexer/tests/jobs/test_export_token_transfers_job.py
rename to tests/hemera/indexer/jobs/test_export_token_transfers_job.py
index b1a66da97..6f00335b1 100644
--- a/indexer/tests/jobs/test_export_token_transfers_job.py
+++ b/tests/hemera/indexer/jobs/test_export_token_transfers_job.py
@@ -1,12 +1,12 @@
import pytest
-from common.utils.web3_utils import ZERO_ADDRESS
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, ETHEREUM_PUBLIC_NODE_RPC_URL, LINEA_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.common.utils.web3_utils import ZERO_ADDRESS
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, ETHEREUM_PUBLIC_NODE_RPC_URL, LINEA_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -18,7 +18,6 @@ def test_export_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(LINEA_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -44,7 +43,6 @@ def test_export_weth_depoist_transfer_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -86,7 +84,6 @@ def test_export_weth_depoist_transfer_with_wrong_config_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/indexer/tests/jobs/test_export_trace_job.py b/tests/hemera/indexer/jobs/test_export_trace_job.py
similarity index 64%
rename from indexer/tests/jobs/test_export_trace_job.py
rename to tests/hemera/indexer/jobs/test_export_trace_job.py
index baf7c681b..40db7790b 100644
--- a/indexer/tests/jobs/test_export_trace_job.py
+++ b/tests/hemera/indexer/jobs/test_export_trace_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.contract_internal_transaction import ContractInternalTransaction
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import LINEA_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import LINEA_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(LINEA_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera/indexer/jobs/test_export_transactions_job.py b/tests/hemera/indexer/jobs/test_export_transactions_job.py
new file mode 100644
index 000000000..0b28603de
--- /dev/null
+++ b/tests/hemera/indexer/jobs/test_export_transactions_job.py
@@ -0,0 +1,61 @@
+import pytest
+
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.log import Log
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_exporter
+@pytest.mark.serial
+def test_export_transaction_job():
+ job_scheduler = JobScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(
+ lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
+ ),
+ batch_size=100,
+ debug_batch_size=1,
+ max_workers=5,
+ config={},
+ required_output_types=[Log],
+ )
+
+ job_scheduler.run_jobs(
+ start_block=20273057,
+ end_block=20273058,
+ )
+ assert len(job_scheduler.get_data_buff()[Log.type()]) == 827
+ job_scheduler.clear_data_buff()
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_exporter
+@pytest.mark.serial
+def test_export_transaction_job_with_get_receipt_from_block():
+ job_scheduler = JobScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(
+ lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
+ ),
+ batch_size=100,
+ debug_batch_size=1,
+ max_workers=5,
+ config={
+ "export_transactions_and_logs_job": {
+ "use_receipt_from_blocks_rpc": True,
+ }
+ },
+ required_output_types=[Log],
+ )
+
+ job_scheduler.run_jobs(
+ start_block=20273057,
+ end_block=20273058,
+ )
+ assert len(job_scheduler.get_data_buff()[Log.type()]) == 827
+
+ job_scheduler.clear_data_buff()
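The new test above also exercises `JobScheduler`'s per-job configuration channel: `config` appears to be keyed by job name, with job-specific switches nested underneath (structure inferred from this test, not from a documented schema):

    config = {
        "export_transactions_and_logs_job": {
            # Inferred semantics: fetch receipts via a block-level RPC
            # instead of per-transaction receipt calls.
            "use_receipt_from_blocks_rpc": True,
        },
    }

Both variants assert the same 827 logs for blocks 20273057-20273058, so the flag should only change how receipts are fetched, not what gets indexed.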
diff --git a/indexer/tests/jobs/test_token_id_infos_job.py b/tests/hemera/indexer/jobs/test_token_id_infos_job.py
similarity index 94%
rename from indexer/tests/jobs/test_token_id_infos_job.py
rename to tests/hemera/indexer/jobs/test_token_id_infos_job.py
index 069298f9a..b3a59cb28 100644
--- a/indexer/tests/jobs/test_token_id_infos_job.py
+++ b/tests/hemera/indexer/jobs/test_token_id_infos_job.py
@@ -1,11 +1,17 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.token_id_infos import *
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.tests import CYBER_PUBLIC_NODE_RPC_URL, LINEA_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.token_id_infos import (
+ ERC721TokenIdChange,
+ ERC721TokenIdDetail,
+ ERC1155TokenIdDetail,
+ UpdateERC721TokenIdDetail,
+ UpdateERC1155TokenIdDetail,
+)
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import CYBER_PUBLIC_NODE_RPC_URL, LINEA_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -25,7 +31,6 @@ def test_export_token_id_info_job_on_cyber():
batch=True,
)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -94,7 +99,6 @@ def test_export_token_id_info_job_on_cyber_mul():
batch=True,
)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -164,7 +168,6 @@ def test_export_token_id_info_job_on_linea():
batch=True,
)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -259,7 +262,6 @@ def test_export_token_id_info_job_on_linea_mul():
batch=True,
)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
diff --git a/scheduler/scheduler.py b/tests/hemera/indexer/utils/__init__.py
similarity index 100%
rename from scheduler/scheduler.py
rename to tests/hemera/indexer/utils/__init__.py
diff --git a/indexer/tests/utils/test_multicall_helper.py b/tests/hemera/indexer/utils/test_multicall_helper.py
similarity index 88%
rename from indexer/tests/utils/test_multicall_helper.py
rename to tests/hemera/indexer/utils/test_multicall_helper.py
index a52938ab1..3290820c5 100644
--- a/indexer/tests/utils/test_multicall_helper.py
+++ b/tests/hemera/indexer/utils/test_multicall_helper.py
@@ -4,20 +4,42 @@
# @Author will
# @File test_multicall_helper.py
# @Brief
-import os
import pytest
from web3 import Web3
-from common.utils.abi_code_utils import Function
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.multicall_hemera import Call
-from indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from hemera.common.utils.abi_code_utils import Function
+from hemera.common.utils.format_utils import hex_str_to_bytes
+from hemera.indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION
+from hemera.indexer.utils.multicall_hemera import Call
+from hemera.indexer.utils.multicall_hemera.abi import AGGREGATE_FUNC, TRY_BLOCK_AND_AGGREGATE_FUNC
+from hemera.indexer.utils.multicall_hemera.multi_call_helper import MultiCallHelper
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
web3 = Web3(Web3.HTTPProvider(ETHEREUM_PUBLIC_NODE_RPC_URL))
multicall_helper = MultiCallHelper(web3, {"batch_size": 100, "multicall": True, "max_workers": 10})
+def test_multicall_encode():
+ cl1 = Call(
+ "0x833589fcd6edb6e08f4c7c32d4f71b54bda02913",
+ function_abi=ERC20_BALANCE_OF_FUNCTION,
+ parameters=["0x49866A9CFE9129FbB0B93dCDb4b5eb758Ee3F9Be"],
+ )
+ cl2 = Call(
+ "0x833589fcd6edb6e08f4c7c32d4f71b54bda02913",
+ function_abi=ERC20_BALANCE_OF_FUNCTION,
+ parameters=["0xdDE1bb2B8cb427B889567CbDf6527c4E69C0a392"],
+ )
+ parameters = [[[call.target, hex_str_to_bytes(call.data)] for call in [cl1, cl2]]]
+ assert AGGREGATE_FUNC.encode_function_call_data(parameters) == AGGREGATE_FUNC.encode_multicall_data(parameters)
+
+ parameters = [False, [[call.target, hex_str_to_bytes(call.data)] for call in [cl1, cl2]]]
+ assert TRY_BLOCK_AND_AGGREGATE_FUNC.encode_function_call_data(
+ parameters
+ ) == TRY_BLOCK_AND_AGGREGATE_FUNC.encode_multicall_data(parameters)
+
+
@pytest.mark.indexer
@pytest.mark.multicall_helper
def test_mutlicall_mantle():
diff --git a/indexer/tests/utils/test_utils.py b/tests/hemera/indexer/utils/test_utils.py
similarity index 97%
rename from indexer/tests/utils/test_utils.py
rename to tests/hemera/indexer/utils/test_utils.py
index 1aaa8b02c..943aef512 100644
--- a/indexer/tests/utils/test_utils.py
+++ b/tests/hemera/indexer/utils/test_utils.py
@@ -1,10 +1,10 @@
import pytest
-from common.utils.abi_code_utils import Event, Function, decode_data
-from common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
-from indexer.domain.log import Log
-from indexer.domain.receipt import Receipt
-from indexer.domain.transaction import Transaction
+from hemera.common.utils.abi_code_utils import Event, Function, decode_data
+from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.receipt import Receipt
+from hemera.indexer.domains.transaction import Transaction
@pytest.mark.indexer
diff --git a/tests/hemera_udf/__init__.py b/tests/hemera_udf/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/hemera_udf/address_index/__init__.py b/tests/hemera_udf/address_index/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/address_index/test_address_index_job.py b/tests/hemera_udf/address_index/test_address_index_job.py
similarity index 67%
rename from indexer/tests/address_index/test_address_index_job.py
rename to tests/hemera_udf/address_index/test_address_index_job.py
index 178295662..c0f337f36 100644
--- a/indexer/tests/address_index/test_address_index_job.py
+++ b/tests/hemera_udf/address_index/test_address_index_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.custom.address_index.domain import *
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.address_index.domains import *
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_address_index_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera_udf/bridge/__init__.py b/tests/hemera_udf/bridge/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/bridge/arbitrum/__init__.py b/tests/hemera_udf/bridge/arbitrum/__init__.py
similarity index 100%
rename from indexer/tests/bridge/arbitrum/__init__.py
rename to tests/hemera_udf/bridge/arbitrum/__init__.py
diff --git a/indexer/tests/bridge/arbitrum/arbitrum_bridge_parser_test.py b/tests/hemera_udf/bridge/arbitrum/arbitrum_bridge_parser_test.py
similarity index 100%
rename from indexer/tests/bridge/arbitrum/arbitrum_bridge_parser_test.py
rename to tests/hemera_udf/bridge/arbitrum/arbitrum_bridge_parser_test.py
diff --git a/indexer/tests/bridge/arbitrum/jobs/__init__.py b/tests/hemera_udf/bridge/arbitrum/jobs/__init__.py
similarity index 100%
rename from indexer/tests/bridge/arbitrum/jobs/__init__.py
rename to tests/hemera_udf/bridge/arbitrum/jobs/__init__.py
diff --git a/indexer/tests/bridge/arbitrum/jobs/test_arb_eth.py b/tests/hemera_udf/bridge/arbitrum/jobs/test_arb_eth.py
similarity index 93%
rename from indexer/tests/bridge/arbitrum/jobs/test_arb_eth.py
rename to tests/hemera_udf/bridge/arbitrum/jobs/test_arb_eth.py
index 5b5fc8127..7ceb442d9 100644
--- a/indexer/tests/bridge/arbitrum/jobs/test_arb_eth.py
+++ b/tests/hemera_udf/bridge/arbitrum/jobs/test_arb_eth.py
@@ -8,22 +8,22 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.bridge.arbitrum.arb_parser import (
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.bridge.arbitrum.arb_parser import (
ArbitrumStateBatchConfirmed,
ArbitrumStateBatchCreated,
ArbitrumTransactionBatch,
)
-from indexer.modules.bridge.domain.arbitrum import (
+from hemera_udf.bridge.domains.arbitrum import (
ArbitrumL1ToL2TransactionOnL1,
ArbitrumL1ToL2TransactionOnL2,
ArbitrumL2ToL1TransactionOnL1,
ArbitrumL2ToL1TransactionOnL2,
)
-from indexer.tests import ARBITRUM_PUBLIC_NODE_RPC_URL, ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import ARBITRUM_PUBLIC_NODE_RPC_URL, ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -39,7 +39,6 @@ def test_l1_to_l2_deposit_eth_on_l1():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -86,7 +85,6 @@ def test_l2_to_l1_deposit_eth_on_l2():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -123,7 +121,6 @@ def test_l1_to_l2_deposit_erc20():
l2_tnx_hash = '0x7f2e057ff9fe588e822b192636e7dca87982a93666cb5218c8bdae429c283d1b'
"""
eth_job = JobScheduler(
- item_exporters=[ConsoleItemExporter()],
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
@@ -162,7 +159,6 @@ def test_l1_to_l2_deposit_erc20():
lambda: get_provider_from_uri(ARBITRUM_PUBLIC_NODE_RPC_URL, batch=True)
),
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ARBITRUM_PUBLIC_NODE_RPC_URL, batch=True)),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
max_workers=1,
config={
@@ -195,7 +191,6 @@ def test_l2_to_l1_withdraw():
l1_tnx_hash = '0x1013dea84e83985fa2dd7dbf4ff71dced8c98d1e442e47f0ec39ac5fe4b2008a'
"""
arb_job = JobScheduler(
- item_exporters=[ConsoleItemExporter()],
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ARBITRUM_PUBLIC_NODE_RPC_URL, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_PUBLIC_NODE_RPC_URL, batch=True)
@@ -227,7 +222,6 @@ def test_l2_to_l1_withdraw():
arb_job.clear_data_buff()
eth_job = JobScheduler(
- item_exporters=[ConsoleItemExporter()],
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
@@ -264,7 +258,6 @@ def test_state_batch_eth():
# node_created_tnx_hash = '0x3772f60c09379b147a80086f185b9fc3b7151a871fb48fa674e40ffa970b4aa4'
eth_job = JobScheduler(
- item_exporters=[ConsoleItemExporter()],
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
@@ -293,7 +286,6 @@ def test_state_batch_eth():
eth_job.clear_data_buff()
eth_job_1 = JobScheduler(
- item_exporters=[ConsoleItemExporter()],
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_DEBUG_RPC_URL, batch=True)
@@ -335,7 +327,6 @@ def test_transaction_batch_eth():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/indexer/tests/bridge/arbitrum/jobs/test_dodo_test_arb_sepolia.py b/tests/hemera_udf/bridge/arbitrum/jobs/test_dodo_test_arb_sepolia.py
similarity index 97%
rename from indexer/tests/bridge/arbitrum/jobs/test_dodo_test_arb_sepolia.py
rename to tests/hemera_udf/bridge/arbitrum/jobs/test_dodo_test_arb_sepolia.py
index 5f6ae74b7..fa670eb15 100644
--- a/indexer/tests/bridge/arbitrum/jobs/test_dodo_test_arb_sepolia.py
+++ b/tests/hemera_udf/bridge/arbitrum/jobs/test_dodo_test_arb_sepolia.py
@@ -53,7 +53,6 @@ def test_l1_to_l2_deposit_dodo():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -115,7 +114,6 @@ def test_l1_to_l2_deposit_dodo():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(DODO_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -154,7 +152,6 @@ def test_l1_to_l2_deposit_usdc():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -199,7 +196,6 @@ def test_l1_to_l2_deposit_kind12():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -243,7 +239,6 @@ def test_l1_to_l2_deposit_gld():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -286,7 +281,6 @@ def test_l1_to_l2_deposit_erc20():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -322,7 +316,6 @@ def test_l1_to_l2_deposit_erc20():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -357,7 +350,6 @@ def test_l2_to_l1_withdraw():
job_scheduler = JobScheduler(
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(l2_rpc, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(l2_rpc, batch=True)),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -389,7 +381,6 @@ def test_l2_to_l1_withdraw():
job_scheduler = JobScheduler(
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(l1_rpc, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(l1_rpc, batch=True)),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -429,7 +420,6 @@ def test_state_batch_eth():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -463,7 +453,6 @@ def test_state_batch_eth():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ARBITRUM_TESTNET_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
@@ -500,7 +489,6 @@ def test_transaction_batch_eth():
job_scheduler = JobScheduler(
batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(l1_rpc, batch=True)),
batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(l1_rpc, batch=True)),
- item_exporters=[ConsoleItemExporter()],
batch_size=10,
debug_batch_size=1,
max_workers=1,
diff --git a/indexer/tests/bridge/arbitrum/rlp_test.py b/tests/hemera_udf/bridge/arbitrum/rlp_test.py
similarity index 97%
rename from indexer/tests/bridge/arbitrum/rlp_test.py
rename to tests/hemera_udf/bridge/arbitrum/rlp_test.py
index 569623452..73baa3eea 100644
--- a/indexer/tests/bridge/arbitrum/rlp_test.py
+++ b/tests/hemera_udf/bridge/arbitrum/rlp_test.py
@@ -7,7 +7,7 @@
import pytest
from web3 import Web3
-from indexer.modules.bridge.arbitrum.arb_rlp import calculate_deposit_tx_id, calculate_submit_retryable_id
+from hemera_udf.bridge.arbitrum.arb_rlp import calculate_deposit_tx_id, calculate_submit_retryable_id
@pytest.mark.indexer
diff --git a/tests/hemera_udf/bridge/bedrock/__init__.py b/tests/hemera_udf/bridge/bedrock/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/hemera_udf/bridge/bedrock/jobs/__init__.py b/tests/hemera_udf/bridge/bedrock/jobs/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/bridge/bedrock/jobs/test_fetch_op_bedrock_bridge_on_data_job.py b/tests/hemera_udf/bridge/bedrock/jobs/test_fetch_op_bedrock_bridge_on_data_job.py
similarity index 65%
rename from indexer/tests/bridge/bedrock/jobs/test_fetch_op_bedrock_bridge_on_data_job.py
rename to tests/hemera_udf/bridge/bedrock/jobs/test_fetch_op_bedrock_bridge_on_data_job.py
index fb4ee948f..a12f21889 100644
--- a/indexer/tests/bridge/bedrock/jobs/test_fetch_op_bedrock_bridge_on_data_job.py
+++ b/tests/hemera_udf/bridge/bedrock/jobs/test_fetch_op_bedrock_bridge_on_data_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.bridge.domain.op_bedrock import OpL1ToL2DepositedTransaction
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.bridge.domains.op_bedrock import OpL1ToL2DepositedTransaction
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -18,7 +18,6 @@ def test_fetch_op_bedrock_bridge_on_data():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera_udf/bridge/bedrock/parser/__init__.py b/tests/hemera_udf/bridge/bedrock/parser/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py b/tests/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc20.py b/tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc20.py
similarity index 89%
rename from indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc20.py
rename to tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc20.py
index 72b4a66ca..3f09ff5c1 100644
--- a/indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc20.py
+++ b/tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc20.py
@@ -1,12 +1,12 @@
import pytest
-from indexer.modules.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
+from hemera_udf.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
@pytest.mark.indexer
@pytest.mark.indexer_bridge
def test_bedrock_finalize_bridge_erc20_decoder():
- from indexer.modules.bridge.bedrock.parser.function_parser.finalize_bridge_erc20 import decode_function
+ from hemera_udf.bridge.bedrock.parser.function_parser.finalize_bridge_erc20 import decode_function
# BridgeRemoteFunctionCallInfo(bridge_from_address='0xc451b0191351ce308fdfd779d73814c910fc5ecb', bridge_to_address='0xc451b0191351ce308fdfd779d73814c910fc5ecb', local_token_address='0xb73603c5d87fa094b7314c74ace2e64d165016fb', remote_token_address='0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48', amount=100000000000, extra_info={}, remove_function_call_type=1)
bridge_info = decode_function(
diff --git a/indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc721.py b/tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc721.py
similarity index 89%
rename from indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc721.py
rename to tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc721.py
index e2dec7c1f..638632faf 100644
--- a/indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc721.py
+++ b/tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_erc721.py
@@ -1,12 +1,12 @@
import pytest
-from indexer.modules.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
+from hemera_udf.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
@pytest.mark.indexer
@pytest.mark.indexer_bridge
def test_bedrock_finalize_bridge_erc721_decoder():
- from indexer.modules.bridge.bedrock.parser.function_parser.finalize_bridge_erc721 import decode_function
+ from hemera_udf.bridge.bedrock.parser.function_parser.finalize_bridge_erc721 import decode_function
bridge_info = decode_function(
bytearray.fromhex(
diff --git a/indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_eth.py b/tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_eth.py
similarity index 81%
rename from indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_eth.py
rename to tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_eth.py
index 2861ed0e7..00e175754 100644
--- a/indexer/tests/bridge/bedrock/parser/function_parser/test_finalize_bridge_eth.py
+++ b/tests/hemera_udf/bridge/bedrock/parser/function_parser/test_finalize_bridge_eth.py
@@ -1,12 +1,12 @@
import pytest
-from indexer.modules.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
+from hemera_udf.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
@pytest.mark.indexer
@pytest.mark.indexer_bridge
def test_bedrock_finalize_bridge_eth_decoder():
- from indexer.modules.bridge.bedrock.parser.function_parser.finalize_bridge_eth import decode_function
+ from hemera_udf.bridge.bedrock.parser.function_parser.finalize_bridge_eth import decode_function
bridge_info = decode_function(
bytearray.fromhex(
diff --git a/indexer/tests/bridge/bedrock/parser/test_bedrock_bridge_parser.py b/tests/hemera_udf/bridge/bedrock/parser/test_bedrock_bridge_parser.py
similarity index 98%
rename from indexer/tests/bridge/bedrock/parser/test_bedrock_bridge_parser.py
rename to tests/hemera_udf/bridge/bedrock/parser/test_bedrock_bridge_parser.py
index 116c3927e..bfdb10730 100644
--- a/indexer/tests/bridge/bedrock/parser/test_bedrock_bridge_parser.py
+++ b/tests/hemera_udf/bridge/bedrock/parser/test_bedrock_bridge_parser.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.modules.bridge.bedrock.parser.bedrock_bridge_parser import (
+from hemera_udf.bridge.bedrock.parser.bedrock_bridge_parser import (
parse_message_passed_event,
parse_transaction_deposited_event,
)
-from indexer.modules.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
-from indexer.tests.json_rpc_to_dataclass import get_transaction_from_rpc
+from hemera_udf.bridge.bedrock.parser.function_parser import BedRockFunctionCallType
+from tests_commons.json_rpc_to_dataclass import get_transaction_from_rpc
DEFAULT_ETHEREUM_RPC = "https://ethereum-rpc.publicnode.com"
DEFAULT_OPTIMISM_RPC = "https://optimism-rpc.publicnode.com"
diff --git a/tests/hemera_udf/bridge/morphl2/__init__.py b/tests/hemera_udf/bridge/morphl2/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/hemera_udf/bridge/morphl2/jobs/__init__.py b/tests/hemera_udf/bridge/morphl2/jobs/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/bridge/morphl2/jobs/test_fetch_morph_bridge_on_l1.py b/tests/hemera_udf/bridge/morphl2/jobs/test_fetch_morph_bridge_on_l1.py
similarity index 91%
rename from indexer/tests/bridge/morphl2/jobs/test_fetch_morph_bridge_on_l1.py
rename to tests/hemera_udf/bridge/morphl2/jobs/test_fetch_morph_bridge_on_l1.py
index 81dc58589..63dd9b61a 100644
--- a/indexer/tests/bridge/morphl2/jobs/test_fetch_morph_bridge_on_l1.py
+++ b/tests/hemera_udf/bridge/morphl2/jobs/test_fetch_morph_bridge_on_l1.py
@@ -1,16 +1,16 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.bridge.domain.morph import (
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.bridge.domains.morph import (
MorphDepositedTransactionOnL1,
MorphDepositedTransactionOnL2,
MorphWithdrawalTransactionOnL1,
MorphWithdrawalTransactionOnL2,
)
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL, MORPHL2_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL, MORPHL2_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -22,7 +22,6 @@ def test_fetch_morphl2_bridge_on_l1_deposited():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -53,7 +52,6 @@ def test_fetch_morphl2_bridge_on_l1_withdrawal():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -84,7 +82,6 @@ def test_fetch_morphl2_bridge_on_l2_withdrawl():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(MORPHL2_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -133,7 +130,6 @@ def test_fetch_morphl2_bridge_on_l2_deposited():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(MORPHL2_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
@@ -182,7 +178,6 @@ def test_fetch_morphl2_bridge_on_l2_deposit():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(MORPHL2_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera_udf/day_mining/__init__.py b/tests/hemera_udf/day_mining/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/day_mining/test_export_day_mining.py b/tests/hemera_udf/day_mining/test_export_day_mining.py
similarity index 63%
rename from indexer/tests/day_mining/test_export_day_mining.py
rename to tests/hemera_udf/day_mining/test_export_day_mining.py
index 05a6743b9..691d942bf 100644
--- a/indexer/tests/day_mining/test_export_day_mining.py
+++ b/tests/hemera_udf/day_mining/test_export_day_mining.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.custom.all_features_value_record import AllFeatureValueRecordTraitsActiveness
-from indexer.tests import CYBER_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.aci_features.domains import AllFeatureValueRecordTraitsActiveness
+from tests_commons import CYBER_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(CYBER_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=10,
max_workers=5,
diff --git a/tests/hemera_udf/ens/__init__.py b/tests/hemera_udf/ens/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/ens/test_namehash.py b/tests/hemera_udf/ens/test_namehash.py
similarity index 76%
rename from indexer/tests/ens/test_namehash.py
rename to tests/hemera_udf/ens/test_namehash.py
index bd3dfc7b0..06fa9db0f 100644
--- a/indexer/tests/ens/test_namehash.py
+++ b/tests/hemera_udf/ens/test_namehash.py
@@ -8,15 +8,15 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.domain.log import Log
-from indexer.domain.transaction import Transaction
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.custom.hemera_ens import EnsConfLoader, EnsHandler
-from indexer.modules.custom.hemera_ens.ens_hash import compute_node_label, get_label, namehash
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.hemera_ens.ens_handler import EnsConfLoader, EnsHandler
+from hemera_udf.hemera_ens.ens_hash import compute_node_label, get_label, namehash
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -49,7 +49,6 @@ def test_mirgate_names():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera_udf/etherfi/__init__.py b/tests/hemera_udf/etherfi/__init__.py
new file mode 100644
index 000000000..a14c2df9b
--- /dev/null
+++ b/tests/hemera_udf/etherfi/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# @Time 2024/12/10 18:26
+# @Author will
+# @File __init__.py
+# @Brief
diff --git a/indexer/tests/custom_jobs/test_export_etherfi_share.py b/tests/hemera_udf/etherfi/test_export_etherfi_share.py
similarity index 72%
rename from indexer/tests/custom_jobs/test_export_etherfi_share.py
rename to tests/hemera_udf/etherfi/test_export_etherfi_share.py
index 0fdb5f548..d1bc1506c 100644
--- a/indexer/tests/custom_jobs/test_export_etherfi_share.py
+++ b/tests/hemera_udf/etherfi/test_export_etherfi_share.py
@@ -1,15 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.custom.etherfi.domains.eeth import (
- EtherFiPositionValuesD,
- EtherFiShareBalanceCurrentD,
- EtherFiShareBalanceD,
-)
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.etherfi.domains import EtherFiPositionValuesD, EtherFiShareBalanceCurrentD, EtherFiShareBalanceD
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -21,7 +17,6 @@ def test_export_etherfi_share_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera_udf/lido/__init__.py b/tests/hemera_udf/lido/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/custom_jobs/test_export_lido_share.py b/tests/hemera_udf/lido/test_export_lido_share.py
similarity index 70%
rename from indexer/tests/custom_jobs/test_export_lido_share.py
rename to tests/hemera_udf/lido/test_export_lido_share.py
index 9373b47e8..9c6c4cf4e 100644
--- a/indexer/tests/custom_jobs/test_export_lido_share.py
+++ b/tests/hemera_udf/lido/test_export_lido_share.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.custom.lido.domains.seth import LidoPositionValuesD, LidoShareBalanceCurrentD, LidoShareBalanceD
-from indexer.tests import ETHEREUM_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.lido.domains import LidoPositionValuesD, LidoShareBalanceCurrentD, LidoShareBalanceD
+from tests_commons import ETHEREUM_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_lido_share_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(ETHEREUM_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/tests/hemera_udf/meme_agent/__init__.py b/tests/hemera_udf/meme_agent/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/hemera_udf/meme_agent/test_export_meme_agent.py b/tests/hemera_udf/meme_agent/test_export_meme_agent.py
new file mode 100644
index 000000000..d0d6871ab
--- /dev/null
+++ b/tests/hemera_udf/meme_agent/test_export_meme_agent.py
@@ -0,0 +1,98 @@
+import pytest
+
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.meme_agent.domains.clanker import ClankerCreatedTokenD
+from hemera_udf.meme_agent.domains.larry import LarryCreatedTokenD
+from hemera_udf.meme_agent.domains.virtuals import VirtualsCreatedTokenD
+from tests_commons import BASE_PUBLIC_NODE_RPC_URL
+
+config = {
+ "export_meme_token_created_job": {
+ "clanker_factory_address_v0": "0x250c9FB2b411B48273f69879007803790A6AeA47",
+ "clanker_factory_address_v1": "0x9b84fce5dcd9a38d2d01d5d72373f6b6b067c3e1",
+ "virtuals_factory_address_v0": "0x41a0f5b16b10748d594b471850bd7488f929beba",
+ "virtuals_factory_address_v1": "0x94Bf9622348Cf5598D9A491Fa809194Cf85A0D61",
+ "larry_factory_address": [
+ "0x5faAb5D52790916ed9c2C159960006151e311bA0",
+ "0xb3a720f17902b7d2e8c38c5044c3b20e8ac9c27c",
+ ],
+ }
+}
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_exporter
+@pytest.mark.serial
+def test_export_clanker_v1_job():
+ job_scheduler = JobScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(BASE_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(BASE_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_size=100,
+ debug_batch_size=1,
+ max_workers=5,
+ config=config,
+ required_output_types=[ClankerCreatedTokenD],
+ )
+
+ job_scheduler.run_jobs(
+ start_block=23603785,
+ end_block=23603786,
+ )
+
+ data_buff = job_scheduler.get_data_buff()
+
+ token = data_buff[ClankerCreatedTokenD.type()]
+ assert len(token) == 1
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_exporter
+@pytest.mark.serial
+def test_export_virtuals_v1_job():
+ job_scheduler = JobScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(BASE_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(BASE_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_size=100,
+ debug_batch_size=1,
+ max_workers=5,
+ config=config,
+ required_output_types=[VirtualsCreatedTokenD],
+ )
+
+ job_scheduler.run_jobs(
+ start_block=23268136,
+ end_block=23268137,
+ )
+
+ data_buff = job_scheduler.get_data_buff()
+
+ token = data_buff[VirtualsCreatedTokenD.type()]
+ assert len(token) == 1
+
+
+@pytest.mark.indexer
+@pytest.mark.indexer_exporter
+@pytest.mark.serial
+def test_export_larry_job():
+ job_scheduler = JobScheduler(
+ batch_web3_provider=ThreadLocalProxy(lambda: get_provider_from_uri(BASE_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_web3_debug_provider=ThreadLocalProxy(lambda: get_provider_from_uri(BASE_PUBLIC_NODE_RPC_URL, batch=True)),
+ batch_size=100,
+ debug_batch_size=1,
+ max_workers=5,
+ config=config,
+ required_output_types=[LarryCreatedTokenD],
+ )
+
+ job_scheduler.run_jobs(
+ start_block=23526723,
+ end_block=23526723,
+ )
+
+ data_buff = job_scheduler.get_data_buff()
+
+ token = data_buff[LarryCreatedTokenD.type()]
+ assert len(token) == 1
diff --git a/tests/hemera_udf/user_ops/__init__.py b/tests/hemera_udf/user_ops/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/indexer/tests/user_ops/test_export_user_ops_job.py b/tests/hemera_udf/user_ops/test_export_user_ops_job.py
similarity index 63%
rename from indexer/tests/user_ops/test_export_user_ops_job.py
rename to tests/hemera_udf/user_ops/test_export_user_ops_job.py
index 79ab8c9ac..3b1fab1e7 100644
--- a/indexer/tests/user_ops/test_export_user_ops_job.py
+++ b/tests/hemera_udf/user_ops/test_export_user_ops_job.py
@@ -1,11 +1,11 @@
import pytest
-from indexer.controller.scheduler.job_scheduler import JobScheduler
-from indexer.exporters.console_item_exporter import ConsoleItemExporter
-from indexer.modules.user_ops.domain.user_operations import UserOperationsResult
-from indexer.tests import CYBER_PUBLIC_NODE_RPC_URL
-from indexer.utils.provider import get_provider_from_uri
-from indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera.indexer.controller.scheduler.job_scheduler import JobScheduler
+from hemera.indexer.exporters.console_item_exporter import ConsoleItemExporter
+from hemera.indexer.utils.provider import get_provider_from_uri
+from hemera.indexer.utils.thread_local_proxy import ThreadLocalProxy
+from hemera_udf.user_ops.domains import UserOperationsResult
+from tests_commons import CYBER_PUBLIC_NODE_RPC_URL
@pytest.mark.indexer
@@ -17,7 +17,6 @@ def test_export_job():
batch_web3_debug_provider=ThreadLocalProxy(
lambda: get_provider_from_uri(CYBER_PUBLIC_NODE_RPC_URL, batch=True)
),
- item_exporters=[ConsoleItemExporter()],
batch_size=100,
debug_batch_size=1,
max_workers=5,
diff --git a/indexer/tests/__init__.py b/tests_commons/__init__.py
similarity index 93%
rename from indexer/tests/__init__.py
rename to tests_commons/__init__.py
index 7806af9f9..b56651765 100644
--- a/indexer/tests/__init__.py
+++ b/tests_commons/__init__.py
@@ -25,3 +25,5 @@
CYBER_PUBLIC_NODE_RPC_URL = os.environ.get("CYBER_PUBLIC_NODE_RPC_URL", "https://cyber-mainnet-archive.alt.technology")
+
+BASE_PUBLIC_NODE_RPC_URL = os.environ.get("BASE_PUBLIC_NODE_RPC_URL", "https://base.llamarpc.com")
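Every URL in `tests_commons` resolves from the environment first and falls back to a public endpoint, so CI can inject private RPC nodes via secrets while local runs need no setup. A hypothetical override for a local run: `BASE_PUBLIC_NODE_RPC_URL=https://my-base-node.example pytest -m indexer`.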
diff --git a/indexer/tests/json_rpc_to_dataclass.py b/tests_commons/json_rpc_to_dataclass.py
similarity index 87%
rename from indexer/tests/json_rpc_to_dataclass.py
rename to tests_commons/json_rpc_to_dataclass.py
index 2d7cab799..85641d25f 100644
--- a/indexer/tests/json_rpc_to_dataclass.py
+++ b/tests_commons/json_rpc_to_dataclass.py
@@ -3,12 +3,12 @@
from eth_utils import to_int
-from indexer.domain.log import Log
-from indexer.domain.receipt import Receipt
-from indexer.domain.transaction import Transaction
-from indexer.utils.json_rpc_requests import generate_get_block_by_number_json_rpc, generate_get_receipt_json_rpc
-from indexer.utils.provider import BatchHTTPProvider
-from indexer.utils.rpc_utils import rpc_response_batch_to_results
+from hemera.indexer.domains.log import Log
+from hemera.indexer.domains.receipt import Receipt
+from hemera.indexer.domains.transaction import Transaction
+from hemera.indexer.utils.json_rpc_requests import generate_get_block_by_number_json_rpc, generate_get_receipt_json_rpc
+from hemera.indexer.utils.provider import BatchHTTPProvider
+from hemera.indexer.utils.rpc_utils import rpc_response_batch_to_results
def get_transaction_from_rpc(rpc: str, transaction_hash: str) -> Optional[Transaction]:
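Judging by its imports and signature, `get_transaction_from_rpc` fetches a transaction with its receipt and logs over batched JSON-RPC and hydrates them into the `Transaction` dataclass, which is how the bedrock parser test builds its fixtures. A minimal usage sketch, with a hypothetical transaction hash:

    from tests_commons.json_rpc_to_dataclass import get_transaction_from_rpc

    tx = get_transaction_from_rpc(
        "https://ethereum-rpc.publicnode.com",  # endpoint used by the bedrock parser test
        "0x" + "00" * 32,  # hypothetical transaction hash
    )
    if tx is not None:  # returns Optional[Transaction]
        print(tx)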
diff --git a/indexer/tests/utils.py b/tests_commons/utils.py
similarity index 100%
rename from indexer/tests/utils.py
rename to tests_commons/utils.py