diff --git a/.gitignore b/.gitignore index 565e3f375..c4b213d10 100644 --- a/.gitignore +++ b/.gitignore @@ -58,3 +58,6 @@ sync_record alembic.ini !hemera_udf/hemera_ens/abi/*.json !hemera_udf/cyber_id/abi/*.json + +# DB +*.db \ No newline at end of file diff --git a/Makefile b/Makefile index f5d63d714..ecf277834 100644 --- a/Makefile +++ b/Makefile @@ -48,14 +48,14 @@ init_db: development: @echo "Setting up development environment..." @bash -c 'set -euo pipefail; \ - PYTHON_CMD=$$(command -v python3 || command -v python); \ + PYTHON_CMD=$$(command -v python3.10 || command -v python.10); \ if [ -z "$$PYTHON_CMD" ] || ! "$$PYTHON_CMD" --version 2>&1 | grep -q "Python 3"; then \ echo "Python 3 is not found. Please install Python 3 and try again."; \ exit 1; \ fi; \ python_version=$$($$PYTHON_CMD -c "import sys; print(\"{}.{}\".format(sys.version_info.major, sys.version_info.minor))"); \ - if ! echo "$$python_version" | grep -qE "^3\.(8|9|10|11)"; then \ - echo "Python version $$python_version is not supported. Please use Python 3.8, 3.9, 3.10, or 3.11."; \ + if ! echo "$$python_version" | grep -qE "^3\.(10|11|12|13)"; then \ + echo "Python version $$python_version is not supported. Please use Python 3.10, 3.11, 3.12 or 3.13."; \ exit 1; \ fi; \ echo "Using Python: $$($$PYTHON_CMD --version)"; \ diff --git a/config/indexer-config.yaml b/config/indexer-config.yaml index 187215c7f..e9f652198 100644 --- a/config/indexer-config.yaml +++ b/config/indexer-config.yaml @@ -1,4 +1,12 @@ chain_id: 1 +uniswap_v3_job: + # empty means no filter. 
only work in price job + pool_address: + # works in price\token job + jobs: + - type: uniswapv3 # uniswapv3` + factory address: '0x1f98431c8ad98523631ae4a59f267346ea31f984' + position_token_address: '0xc36442b4a4522e871399cd717abdd847ab11fe88' opensea_job: seaport_1_6: contract_address: "0x0000000000000068f116a894984e2db1123eb395" diff --git a/config/morphl2-config.yaml b/config/morphl2-config.yaml new file mode 100644 index 000000000..f4c22d313 --- /dev/null +++ b/config/morphl2-config.yaml @@ -0,0 +1,4 @@ +chain_id: 1 +morph_bridge_on_l1_job: + l1_message_queue_oracle_contract_address: "0x3931ade842f5bb8763164bdd81e5361dce6cc1ef" + l1_cross_domain_messenger_contract_address: "0xdc71366effa760804dcfc3edf87fa2a6f1623304" diff --git a/hemera/api/app/address/__init__.py b/hemera/api/app/address/__init__.py deleted file mode 100644 index c72b9a720..000000000 --- a/hemera/api/app/address/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from flask_restx.namespace import Namespace - -address_features_namespace = Namespace( - "Address Features", - path="/", - description="Address Features API", -) diff --git a/hemera/api/app/address/features.py b/hemera/api/app/address/features.py deleted file mode 100644 index dc67d2f44..000000000 --- a/hemera/api/app/address/features.py +++ /dev/null @@ -1,28 +0,0 @@ -from functools import wraps - -feature_router = {} - - -class FeatureRegistry: - def __init__(self): - self.features = {} - self.feature_list = [] - - def register(self, feature_name, subcategory): - def decorator(f): - if feature_name not in self.features: - self.features[feature_name] = {} - self.feature_list.append(feature_name) - self.features[feature_name][subcategory] = f - - @wraps(f) - def wrapper(*args, **kwargs): - return f(*args, **kwargs) - - return wrapper - - return decorator - - -feature_registry = FeatureRegistry() -register_feature = feature_registry.register diff --git a/hemera/api/app/address/models.py b/hemera/api/app/address/models.py deleted file mode 
100644 index bf8d5967e..000000000 --- a/hemera/api/app/address/models.py +++ /dev/null @@ -1,43 +0,0 @@ -from sqlalchemy import Column, Integer -from sqlalchemy.dialects.postgresql import BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP - -from hemera.common.models import HemeraModel - - -class AddressBaseProfile(HemeraModel): - __tablename__ = "af_base_profile" - - address = Column(BYTEA, primary_key=True, nullable=False) - init_funding_from_address = Column(BYTEA) - init_funding_value = Column(NUMERIC(100)) - init_funding_transaction_hash = Column(BYTEA) - init_funding_block_timestamp = Column(TIMESTAMP) - init_block_hash = Column(BYTEA) - init_block_number = Column(INTEGER) - creation_code = Column(BYTEA) - deployed_code = Column(BYTEA) - deployed_block_timestamp = Column(TIMESTAMP) - deployed_block_number = Column(INTEGER) - deployed_block_hash = Column(BYTEA) - deployed_transaction_hash = Column(BYTEA) - deployed_internal_transaction_from_address = Column(BYTEA) - deployed_transaction_from_address = Column(BYTEA) - deployed_trace_id = Column(TEXT) - is_contract = Column(BOOLEAN) - first_transaction_hash = Column(BYTEA) - first_block_hash = Column(BYTEA) - first_block_number = Column(INTEGER) - first_block_timestamp = Column(TIMESTAMP) - first_trace_id = Column(TEXT) - first_is_from_address = Column(BOOLEAN) - first_trace_type = Column(TEXT) - first_call_type = Column(TEXT) - - -class ScheduledMetadata(HemeraModel): - __tablename__ = "af_base_na_scheduled_metadata" - - id = Column(Integer, primary_key=True) - dag_id = Column(TEXT) - execution_date = Column(TIMESTAMP) - last_data_timestamp = Column(TIMESTAMP) diff --git a/hemera/api/app/address/routes.py b/hemera/api/app/address/routes.py deleted file mode 100644 index 9f79a1700..000000000 --- a/hemera/api/app/address/routes.py +++ /dev/null @@ -1,73 +0,0 @@ -import time - -from flask import request -from flask_restx import Resource - -from hemera.api.app.address import address_features_namespace -from 
hemera.api.app.address.features import feature_registry -from hemera.api.app.main import app - -PAGE_SIZE = 10 -MAX_TRANSACTION = 500000 -MAX_TRANSACTION_WITH_CONDITION = 10000 -MAX_INTERNAL_TRANSACTION = 10000 -MAX_TOKEN_TRANSFER = 10000 - -logger = app.logger - - -@address_features_namespace.route("/v1/aci/
/all_features") -@address_features_namespace.route("/v2/aci/
/all_features") -class ACIAllFeatures(Resource): - def get(self, address): - address = address.lower() - requested_features = request.args.get("features") - - if requested_features: - feature_list = [f for f in requested_features.split(",") if f in feature_registry.feature_list] - else: - feature_list = feature_registry.feature_list - - feature_result = {} - total_start_time = time.time() - - for feature in feature_list: - feature_start_time = time.time() - feature_result[feature] = {} - for subcategory in feature_registry.features[feature]: - subcategory_start_time = time.time() - try: - feature_result[feature][subcategory] = feature_registry.features[feature][subcategory](address) - subcategory_end_time = time.time() - logger.debug( - f"Feature '{feature}' subcategory '{subcategory}' execution time: {subcategory_end_time - subcategory_start_time:.4f} seconds" - ) - except Exception as e: - logger.error(f"Error in feature '{feature}' subcategory '{subcategory}': {str(e)}") - feature_result[feature][subcategory] = {"error": str(e)} - - feature_end_time = time.time() - logger.debug( - f"Total execution time for feature '{feature}': {feature_end_time - feature_start_time:.4f} seconds" - ) - - feature_data_list = [ - {"id": feature_id, **subcategory_dict} - for feature_id in feature_list - if ( - subcategory_dict := { - subcategory: feature_result[feature_id][subcategory] - for subcategory in feature_registry.features[feature_id] - if feature_result[feature_id][subcategory] is not None - } - ) - ] - combined_result = { - "address": address, - "features": feature_data_list, - } - - total_end_time = time.time() - logger.debug(f"Total execution time for all features: {total_end_time - total_start_time:.4f} seconds") - - return combined_result, 200 diff --git a/hemera/api/app/api.py b/hemera/api/app/api.py deleted file mode 100644 index 4b19074eb..000000000 --- a/hemera/api/app/api.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -from 
flask_restx import Api - -from hemera.api.app.address.routes import address_features_namespace -from hemera.api.app.contract.routes import contract_namespace -from hemera.api.app.explorer.routes import explorer_namespace -from hemera.api.app.l2_explorer.routes import l2_explorer_namespace -from hemera.api.app.user_operation.routes import user_operation_namespace -from hemera_udf.address_index.endpoint.routes import address_profile_namespace - -# keep the `*`, make sure registry processed -from hemera_udf.deposit_to_l2.endpoint.routes import * -from hemera_udf.eigen_layer.endpoint.routes import * -from hemera_udf.hemera_ens.endpoint.routes import * -from hemera_udf.init_capital.endpoints.routes import * -from hemera_udf.merchant_moe.endpoints.routes import merchant_moe_namespace -from hemera_udf.opensea.endpoint.routes import * -from hemera_udf.staking_fbtc.endpoints.routes import staking_namespace -from hemera_udf.uniswap_v3.endpoints.routes import * - -api = Api() - -api.add_namespace(explorer_namespace) -api.add_namespace(opensea_namespace) -api.add_namespace(contract_namespace) -api.add_namespace(uniswap_v3_namespace) -api.add_namespace(token_deposit_namespace) -api.add_namespace(user_operation_namespace) -api.add_namespace(staking_namespace) -api.add_namespace(merchant_moe_namespace) - -api.add_namespace(l2_explorer_namespace) -api.add_namespace(af_ens_namespace) -api.add_namespace(address_profile_namespace) - -api.add_namespace(address_features_namespace) -api.add_namespace(init_capital_namespace) diff --git a/hemera/api/app/cache.py b/hemera/api/app/cache.py deleted file mode 100644 index 8dea03404..000000000 --- a/hemera/api/app/cache.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -import redis -from flask_caching import Cache - -from hemera.common.utils.config import get_config - -app_config = get_config() -# Use cache -cache = Cache() - - -class RedisDb: - def __init__(self, host, cache_type=""): - if cache_type == 
"RedisClusterCache": - self.enable_cache = True - self.r = redis.RedisCluster( - host=host, - port=6379, - ssl=True, - ) - elif cache_type == "RedisCache": - self.enable_cache = True - self.r = redis.Redis( - host=host, - port=6379, - ssl=True, - decode_responses=True, - ) - else: - self.enable_cache = False - self.r = None - - def handle_redis_token(self, key, value=None): - if value: - self.r.set(key, value, ex=3600) - else: - redis_token = self.r.get(key) - return redis_token - - def set_init_value(self, key, value): - self.r.set(key, value) - - def get_next_increment_value(self, key): - return self.r.incr(key) - - -redis_db = RedisDb("127.0.0.1", "local") diff --git a/hemera/api/app/config.py b/hemera/api/app/config.py deleted file mode 100644 index 0065afe7b..000000000 --- a/hemera/api/app/config.py +++ /dev/null @@ -1,278 +0,0 @@ -import os -from configparser import ConfigParser -from dataclasses import dataclass, field -from enum import Enum, auto -from urllib.parse import urlparse - -from dataclass_wizard import YAMLWizard - - -@dataclass -class APIUrls(YAMLWizard): - w3w_api_url: str = field(default="https://api.w3w.ai") - - -@dataclass -class TokenConfiguration(YAMLWizard): - dashboard_token: str = field(default="ETH") - native_token: str = field(default="ETH") - gas_fee_token: str = field(default="ETH") - - -@dataclass -class CacheConfig(YAMLWizard): - cache_type: str = field(default=None) - cache_redis_host: str = field(default="127.0.0.1") - cache_key_prefix: str = field(default="socialscan_api_ut") - - def get_cache_config(self, redis_db): - if self.cache_type == "RedisCache": - return { - "CACHE_TYPE": "RedisCache", - "CACHE_REDIS_HOST": redis_db.r, - "CACHE_KEY_PREFIX": self.cache_key_prefix, - } - elif self.cache_type == "RedisClusterCache": - return { - "CACHE_TYPE": "RedisCache", - "CACHE_REDIS_HOST": redis_db.r, - "CACHE_KEY_PREFIX": self.cache_key_prefix, - } - else: - return { - "CACHE_TYPE": "SimpleCache", - "DEBUG": True, - } - - -def 
get_env_or_set_default(env_var, default_var): - if not os.getenv(env_var): - if os.getenv(default_var): - os.environ[env_var] = os.getenv(default_var) - return os.getenv(env_var) - - -@dataclass -class OpRollupDAConfig(YAMLWizard): - da_type: str = field(default=None) - plasma_api_endpoint: str = field(default=None) - blob_scan_endpoint: str = field(default=None) - - -@dataclass -class L2Config(YAMLWizard): - rollup_type: str = field(default=None) - bridge_compatible: bool = field(default=False) - withdrawal_expired_day: int = field(default=0) - da_config: OpRollupDAConfig = field(default_factory=OpRollupDAConfig) - - -@dataclass -class DatabaseConfig(YAMLWizard): - host: str = field(default=None) - port: int = field(default=None) - username: str = field(default=None) - password: str = field(default=None) - database: str = field(default=None) - schema: str = field(default="public") - - def get_sql_alchemy_uri(self): - user = self.username - password = self.password - host = self.host - port = self.port - database = self.database - - return f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{database}" - - @staticmethod - def load_database_config_from_url(url): - result = urlparse(url) - return DatabaseConfig( - host=result.hostname, - port=result.port, - username=result.username, - password=result.password, - database=result.path[1:], # Remove leading '/' - ) - - -class APIModule(Enum): - SOCIALSCAN = auto() - EXPLORER = auto() - L2_EXPLORER = auto() - STATUS = auto() - DEVELOPER = auto() - CONTRACT = auto() - INSCRIPTION = auto() - CELESTIA = auto() - USER_OPERATION = auto() - - -def parse_enum_list(enum_string): - names = enum_string.split(",") - enum_list = [APIModule[name.strip()] for name in names if name.strip() in APIModule.__members__] - return enum_list - - -@dataclass -class AppConfig(YAMLWizard): - api_modules: list = field( - default_factory=lambda: [ - APIModule.EXPLORER, - APIModule.DEVELOPER, - APIModule.CONTRACT, - APIModule.L2_EXPLORER, - 
APIModule.USER_OPERATION, - ] - ) - env: str = field(default=None) - chain: str = field(default=None) - ens_service: str = field(default=None) - contract_service: str = field(default=None) - token_service: str = field(default=None) - feature_flags: dict = field(default_factory=dict) - l2_config: L2Config = field(default_factory=L2Config) - cache_config: CacheConfig = field(default_factory=CacheConfig) - sql_alchemy_engine_options: dict = field(default_factory=dict) - db_read_sql_alchemy_database_config: DatabaseConfig = field(default_factory=DatabaseConfig) - db_write_sql_alchemy_database_config: DatabaseConfig = field(default_factory=DatabaseConfig) - db_common_sql_alchemy_database_config: DatabaseConfig = field(default_factory=DatabaseConfig) - sql_alchemy_database_engine_options: dict = field( - default_factory=lambda: { - "pool_size": 100, - "max_overflow": 100, - } - ) - extra_config: dict = field(default_factory=dict) - rpc: str = field(default="https://ethereum.publicnode.com") - debug_rpc: str = field(default="https://ethereum.publicnode.com") - token_configuration: TokenConfiguration = field(default_factory=TokenConfiguration) - api_urls: APIUrls = field(default_factory=APIUrls) - - def update_from_env(self): - self.env = os.getenv("", self.env) - self.chain = os.getenv("CHAIN", self.chain) - self.ens_service = os.getenv("ENS_SERVICE", self.ens_service) - self.contract_service = os.getenv("CONTRACT_SERVICE", self.contract_service) - self.token_service = os.getenv("TOKEN_SERVICE", self.token_service) - self.sql_alchemy_database_engine_options["pool_size"] = int( - os.getenv("SQL_POOL_SIZE", self.sql_alchemy_database_engine_options.get("pool_size", 100)) - ) - self.sql_alchemy_database_engine_options["max_overflow"] = int( - os.getenv("SQL_MAX_OVERFLOW", self.sql_alchemy_database_engine_options.get("max_overflow", 100)) - ) - self.rpc = os.getenv("PROVIDER_URI", self.rpc) - self.debug_rpc = os.getenv("DEBUG_PROVIDER_URI", self.debug_rpc) - - read_url = 
get_env_or_set_default("READ_POSTGRES_URL", "POSTGRES_URL") - write_url = get_env_or_set_default("WRITE_POSTGRES_URL", "POSTGRES_URL") - common_url = get_env_or_set_default("COMMON_POSTGRES_URL", "POSTGRES_URL") - - self.db_read_sql_alchemy_database_config = ( - DatabaseConfig.load_database_config_from_url( - read_url or self.db_read_sql_alchemy_database_config.get_sql_alchemy_uri() - ) - if read_url - else self.db_read_sql_alchemy_database_config - ) - self.db_write_sql_alchemy_database_config = ( - DatabaseConfig.load_database_config_from_url( - write_url or self.db_write_sql_alchemy_database_config.get_sql_alchemy_uri() - ) - if write_url - else self.db_write_sql_alchemy_database_config - ) - self.db_common_sql_alchemy_database_config = ( - DatabaseConfig.load_database_config_from_url( - common_url or self.db_common_sql_alchemy_database_config.get_sql_alchemy_uri() - ) - if common_url - else self.db_common_sql_alchemy_database_config - ) - - if os.getenv("CACHE_TYPE"): - self.cache_config.cache_type = os.getenv("CACHE_TYPE", self.cache_config.cache_type) - self.cache_config.cache_redis_host = os.getenv("REDIS_HOST", self.cache_config.cache_redis_host) - - if os.getenv("DASHBOARD_TOKEN"): - self.token_configuration.dashboard_token = os.getenv( - "DASHBOARD_TOKEN", self.token_configuration.dashboard_token - ) - if os.getenv("NATIVE_TOKEN"): - self.token_configuration.native_token = os.getenv("NATIVE_TOKEN", self.token_configuration.native_token) - if os.getenv("GAS_FEE_TOKEN"): - self.token_configuration.gas_fee_token = os.getenv("GAS_FEE_TOKEN", self.token_configuration.gas_fee_token) - - if os.getenv("ROLLUP_TYPE"): - self.l2_config.rollup_type = os.getenv("ROLLUP_TYPE", self.l2_config.rollup_type) - - def get_onchain_badge_config(self): - if "on_chain_badge" in self.extra_config: - return self.extra_config["ONCHAIN_BADGE_INFO"] - - @staticmethod - def load_from_yaml_file(filename: str): - return AppConfig.from_yaml_file(filename) - - @staticmethod - def 
load_from_config_file(config: ConfigParser): - db_creds = config["DB_CREDS"] - block_chain = config["BLOCK_CHAIN"] - settings = config["SETTINGS"] - api_urls = config["API_URLS"] - remote = config["REMOTE_SERVICE"] - extra_config = {} - - config_data = { - "chain": block_chain["CHAIN"] or None, - "extra_config": extra_config, - "ens_service": remote.get("ENS_SERVICE") or None, - "contract_service": remote.get("CONTRACT_SERVICE") or None, - "token_service": remote.get("TOKEN_SERVICE") or None, - "cache_config": CacheConfig( - cache_type=settings.get("CACHE_TYPE") or "local", - cache_redis_host=db_creds.get("REDIS_HOST") or "localhost", - cache_key_prefix=f"socialscan_api", - ), - "db_read_sql_alchemy_database_config": DatabaseConfig( - host=db_creds.get("HOST", "localhost"), - port=int(db_creds.get("PORT", "5432")), - username=db_creds.get("USER", "admin"), - password=db_creds.get("PASSWD", "admin"), - database=db_creds["DB_NAME"] or "default", - ), - "db_write_sql_alchemy_database_config": DatabaseConfig( - host=db_creds["WRITE_HOST"], - port=int(db_creds.get("PORT", "5432")), - username=db_creds.get("USER", "admin"), - password=db_creds.get("PASSWD", "admin"), - database=db_creds["DB_NAME"] or "default", - ), - "db_common_sql_alchemy_database_config": DatabaseConfig( - host=db_creds["COMMON_HOST"], - port=int(db_creds.get("PORT", "5432")), - username=db_creds.get("USER", "admin"), - password=db_creds.get("PASSWD", "admin"), - database=db_creds["DB_NAME"], - ), - "api_modules": ( - parse_enum_list(settings.get("API_MODULES") or "EXPLORER,DEVELOPER,CONTRACT,L2_EXPLORER") - if block_chain.get("ROLLUP_TYPE") - else parse_enum_list(settings.get("API_MODULES") or "EXPLORER,DEVELOPER,CONTRACT") - ), - "rpc": block_chain.get("RPC_ENDPOINT") or "https://ethereum.publicnode.com", - "debug_rpc": block_chain.get("DEBUG_RPC_ENDPOINT") or "https://ethereum.publicnode.com", - "token_configuration": TokenConfiguration( - dashboard_token=block_chain.get("DASHBOARD_TOKEN") or 
"ETH", - native_token=block_chain.get("NATIVE_TOKEN") or "ETH", - gas_fee_token=block_chain.get("GAS_FEE_TOKEN") or "ETH", - ), - "api_urls": APIUrls( - w3w_api_url=api_urls.get("W3W_API_HOST") or "https://api.w3w.ai", - ), - } - if "ONCHAIN_BADGE" in config: - config_data["extra_config"].update(config["ONCHAIN_BADGE"]) - - return AppConfig(**config_data) diff --git a/hemera/api/app/contract/__init__.py b/hemera/api/app/contract/__init__.py deleted file mode 100644 index f605d131b..000000000 --- a/hemera/api/app/contract/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from flask_restx.namespace import Namespace - -contract_namespace = Namespace( - "Explorer Contract Parser", - path="/", - description="Explorer Contract Parser API", -) diff --git a/hemera/api/app/contract/contract_verify.py b/hemera/api/app/contract/contract_verify.py deleted file mode 100644 index b1223cc20..000000000 --- a/hemera/api/app/contract/contract_verify.py +++ /dev/null @@ -1,331 +0,0 @@ -import hashlib -from typing import List, Tuple - -import requests - -from hemera.api.app.utils.web3_utils import get_code, get_storage_at, w3 -from hemera.common.models import db -from hemera.common.models.contracts import Contracts -from hemera.common.utils.config import get_config -from hemera.common.utils.exception_control import APIError -from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes - -config = get_config() - -VERIFY_HOST = config.contract_service or "" -VERIFY_SERVICE_VALIDATION = VERIFY_HOST is not None and VERIFY_HOST != "" - -NORMAL_TIMEOUT = 0.5 -VERIFY_TIMEOUT = 30 - -CONTRACT_VERIFY_URL = f"{VERIFY_HOST}/v1/contract_verify/sync_verify" -COMMON_CONTRACT_VERIFY_URL = f"{VERIFY_HOST}/v1/contract_verify/async_verify" -ABI_HOST = f"{VERIFY_HOST}/v1/contract_verify/method" - - -def initial_chain_id(): - try: - CHAIN_ID = config.chain_id - except AttributeError: - - CHAIN_ID = w3.eth.chain_id - return CHAIN_ID - - -CHAIN_ID = initial_chain_id() - - -class MockResponse: - 
def __init__(self, text, status_code): - self.text = text - self.status_code = status_code - - -def get_sha256_hash(input_string): - sha256 = hashlib.sha256() - sha256.update(input_string.encode("utf-8")) - return sha256.hexdigest() - - -def get_json_response_from_contract_verify_service(endpoint): - if not VERIFY_SERVICE_VALIDATION: - return [] - - request_url = f"{VERIFY_HOST}{endpoint}" - try: - response = requests.get(request_url, timeout=NORMAL_TIMEOUT) - if response.status_code == 200: - return response.json() - else: - return [] - except Exception: - return [] - - -# ========================== -# 1. verify function -# ========================== - - -def validate_input(address, compiler_type, compiler_version): - if not address or not compiler_type or not compiler_version: - raise APIError("Missing base required data", code=400) - - -def get_contract_by_address(address: str): - contract = db.session().query(Contracts).filter_by(address=hex_str_to_bytes(address)).first() - if not contract: - raise APIError("The address is not a contract", code=400) - return contract - - -def check_contract_verification_status(contract): - if contract.is_verified: - raise APIError("This contract is already verified", code=400) - - -def get_creation_or_deployed_code(contract: Contracts): - creation_code = None - deployed_code = None - if contract.creation_code: - creation_code = bytes_to_hex_str(contract.creation_code) - if contract.deployed_code: - deployed_code = bytes_to_hex_str(contract.deployed_code) - - if not creation_code: - creation_code = contract.bytecode - if not deployed_code: - deployed_code = get_code(contract.address) - return creation_code, deployed_code - - -def send_sync_verification_request(payload, file_list): - if not VERIFY_SERVICE_VALIDATION: - return MockResponse("No valid verify service is set", 400) - - payload["chain_id"] = CHAIN_ID - files = [("files", (file.filename, file.read(), "application/octet-stream")) for file in file_list] - try: - return 
requests.post(CONTRACT_VERIFY_URL, data=payload, files=files, timeout=VERIFY_TIMEOUT) - except Exception as e: - return MockResponse(str(e), 400) - - -def send_async_verification_request(payload): - if not VERIFY_SERVICE_VALIDATION: - return MockResponse("No valid verify service is set", 400) - - payload["chain_id"] = CHAIN_ID - compiler_type = payload["compiler_type"] - files = [] - if compiler_type == "solidity-standard-json-input": - payload["compiler_type"] = "Solidity (Standard-Json-Input)" - files = [("files", (payload["address"] + ".json", payload["input_str"], "application/octet-stream"))] - - elif compiler_type == "solidity-single-file": - payload["compiler_type"] = "Solidity (Single file)" - try: - return requests.post(COMMON_CONTRACT_VERIFY_URL, data=payload, files=files, timeout=VERIFY_TIMEOUT) - except Exception as e: - return MockResponse(str(e), 400) - - -def command_normal_contract_data(module, action, address, guid): - if not VERIFY_SERVICE_VALIDATION: - return {"message": "No valid verify service is set", "status": "0"}, 200 - - if module != "contract": - return {"message": "The parameter is error", "status": "0"}, 200 - if action == "getabi": - if not address: - return {"message": "the address is must input", "status": "0"}, 200 - address = address.lower() - contracts = get_contract_verification_abi_by_address(address) - if not contracts: - return { - "message": "Contract source code not verified", - "status": "0", - }, 200 - else: - return { - "message": "OK", - "status": "1", - "result": contracts.get("abi"), - }, 200 - elif action == "checkverifystatus": - if not guid: - return {"message": "the guid is must input", "status": "0"}, 200 - history = get_check_verified_status(guid) - if not history: - return {"message": "the guid is error", "status": "0"}, 200 - if history["status"] == "SUCCESS": - return { - "message": "OK", - "result": "Pass - Verified", - "status": "1", - }, 200 - elif history["status"] == "FAILED": - return { - "message": 
"NOK", - "result": "Fail - Unable to verify", - "status": "0", - }, 200 - else: - return { - "message": "NOK", - "result": "Unknown UID", - "status": "0", - }, 200 - return {"message": "the action is error", "status": "0"}, 200 - - -def get_solidity_version(): - return get_json_response_from_contract_verify_service("/v1/contract_verify/solidity_versions") - - -def get_vyper_version(): - return get_json_response_from_contract_verify_service("/v1/contract_verify/vyper_versions") - - -def get_evm_versions(): - return get_json_response_from_contract_verify_service("/v1/contract_verify/evm_versions") - - -def get_explorer_license_type(): - return get_json_response_from_contract_verify_service("/v1/contract_verify/license_types") - - -# ========================== -# 2. get info from a contract -# ========================== - - -def get_contract_code_by_address(address): - endpoint = f"/v1/contract_verify/{CHAIN_ID}/{address}/code" - return get_json_response_from_contract_verify_service(endpoint) - - -def get_similar_addresses(deployed_code_hash): - endpoint = f"/v1/contract_verify/similar_address/{CHAIN_ID}/{deployed_code_hash}" - return get_json_response_from_contract_verify_service(endpoint) - - -def get_abi_by_chain_id_address(address): - endpoint = f"/v1/contract_verify/contract_abi/{CHAIN_ID}/{address}" - return get_json_response_from_contract_verify_service(endpoint) - - -def get_contract_verification_abi_by_address(address): - endpoint = f"/v1/contract_verify/contract_verification_abi/{CHAIN_ID}/{address}" - return get_json_response_from_contract_verify_service(endpoint) - - -def get_check_verified_status(guid): - endpoint = f"/v1/contract_verify/get_verified_status/{CHAIN_ID}/{guid}" - return get_json_response_from_contract_verify_service(endpoint) - - -def get_contract_verification_history_by_address(address): - endpoint = f"/v1/contract_verify/get_verification_history/{CHAIN_ID}/{address}" - return get_json_response_from_contract_verify_service(endpoint) - - 
-def get_implementation_contract(address): - implementation_address = None - for code in [ - "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc", - "0x7050c9e0f4ca769c69bd3a8ef740bc37934f8e2c036e5a723fd8ee048ed3f8c3", - "0xc5f16f0fcc639fa48a6947836d9850f504798523bf8c9a3a87d5876cf622bcf7", - "0x5f3b5dfeb7b28cdbd7faba78963ee202a494e2a2cc8c9978d5e30d2aebb8c197", - # add 5st - ]: - contract_address = get_storage_at(address, code) - if contract_address and contract_address != "0x0000000000000000000000000000000000000000": - implementation_address = contract_address - return implementation_address - - -# ========================== -# 3. get method/log/contract name for a contract -# ========================== -def get_contract_names(address_list: list[str]) -> dict[str:str]: - if not VERIFY_SERVICE_VALIDATION: - return [] - - CONTRACT_NAME_URL = f"{VERIFY_HOST}/v1/contract_verify/get_contract_name" - request_json = { - "chain_id": CHAIN_ID, - "address_list": address_list, - } - try: - response = requests.post(CONTRACT_NAME_URL, json=request_json, timeout=NORMAL_TIMEOUT) - if response.status_code == 200: - return response.json() - else: - return [] - except Exception: - return [] - - -def get_names_from_method_or_topic_list(method_list): - if not VERIFY_SERVICE_VALIDATION: - return [] - - request_json = {"request_type": 0, "method_list": method_list} - try: - response = requests.post(url=ABI_HOST, json=request_json, timeout=NORMAL_TIMEOUT) - if response.status_code == 200: - return response.json() - return [] - except Exception: - return [] - - -def get_abis_for_method(address_signed_prefix_list: List[Tuple[str, str]]): - if not VERIFY_SERVICE_VALIDATION: - return {} - enrich_address_signed_prefix_list = [(l[0], l[1], 0) for l in address_signed_prefix_list] - return get_abis_by_address_signed_prefix(enrich_address_signed_prefix_list) - - -def get_abis_for_logs(address_signed_prefix_list: List[Tuple[str, str, int]]): - if not VERIFY_SERVICE_VALIDATION: - 
return {} - return get_abis_by_address_signed_prefix(address_signed_prefix_list) - - -def get_abis_by_address_signed_prefix(address_signed_prefix_list: List[Tuple[str, str, int]]): - result_list = [] - for address, signed_prefix, indexed_true_count in address_signed_prefix_list: - contract = db.session.get(Contracts, hex_str_to_bytes(address)) - if not contract: - continue - deployed_code_hash = contract.deployed_code_hash - - if contract.is_proxy: - if not contract.implementation_contract: - implementation_contract_address = get_implementation_contract(address) - contract.implementation_contract = implementation_contract_address - db.session.commit() - else: - implementation_contract_address = contract.implementation_contract - implementation_contract = db.session.query(Contracts).get(implementation_contract_address) - if implementation_contract: - implementation_deployed_hash = implementation_contract.deployed_code_hash - result_list.append( - (1, indexed_true_count, address, (deployed_code_hash, implementation_deployed_hash), signed_prefix) - ) - else: - result_list.append((0, indexed_true_count, address, deployed_code_hash, signed_prefix)) - else: - result_list.append((0, indexed_true_count, address, deployed_code_hash, signed_prefix)) - - request_json = {"request_type": 1, "request_list": result_list} - - try: - response = requests.post(url=ABI_HOST, json=request_json, timeout=NORMAL_TIMEOUT) - if response.status_code == 200: - return {(address, topic0): result_map for address, topic0, result_map in response.json()} - return {} - except Exception: - return {} diff --git a/hemera/api/app/contract/routes.py b/hemera/api/app/contract/routes.py deleted file mode 100644 index 1d94c3941..000000000 --- a/hemera/api/app/contract/routes.py +++ /dev/null @@ -1,319 +0,0 @@ -import flask -from flask_restx import Resource - -from hemera.api.app.cache import cache -from hemera.api.app.contract import contract_namespace -from hemera.api.app.contract.contract_verify import ( 
- check_contract_verification_status, - command_normal_contract_data, - get_abi_by_chain_id_address, - get_contract_by_address, - get_contract_code_by_address, - get_creation_or_deployed_code, - get_evm_versions, - get_explorer_license_type, - get_implementation_contract, - get_solidity_version, - get_vyper_version, - send_async_verification_request, - send_sync_verification_request, - validate_input, -) -from hemera.api.app.limiter import limiter -from hemera.common.models import db as postgres_db -from hemera.common.models.contracts import Contracts -from hemera.common.utils.exception_control import APIError -from hemera.common.utils.format_utils import as_dict, hex_str_to_bytes -from hemera.common.utils.web3_utils import ZERO_ADDRESS - - -@contract_namespace.route("/v1/explorer/verify_contract/verify") -@contract_namespace.route("/v2/explorer/verify_contract/verify") -class ExplorerVerifyContract(Resource): - def post(_): - request_form = flask.request.form - address = request_form.get("address", "").lower() - compiler_type = request_form.get("compiler_type") - compiler_version = request_form.get("compiler_version") - evm_version = request_form.get("evm_version") - proxy = request_form.get("proxy") - implementation = request_form.get("implementation") - license_type = request_form.get("license_type", "None") - optimization = request_form.get("optimization") - optimization_runs = request_form.get("optimization_runs") - constructor_arguments = request_form.get("constructor_arguments") - file_list = flask.request.files.getlist("files") - input_str = request_form.get("input_str") - - validate_input(address, compiler_type, compiler_version) - - contracts = get_contract_by_address(address) - check_contract_verification_status(contracts) - - creation_code, deployed_code = get_creation_or_deployed_code(contracts) - - payload = { - "address": address, - "wallet_address": ZERO_ADDRESS, - "compiler_type": compiler_type, - "compiler_version": compiler_version, - 
"evm_version": evm_version, - "license_type": license_type, - "optimization": optimization, - "optimization_runs": optimization_runs, - "input_str": input_str, - "constructor_arguments": constructor_arguments, - "proxy": proxy, - "implementation": implementation, - "creation_code": creation_code, - "deployed_code": deployed_code, - } - - if compiler_type != "Solidity (Standard-Json-Input)": - libraries = request_form.get("libraries") - payload["libraries_data"] = libraries - - response = send_sync_verification_request(payload, file_list) - - if response.status_code == 200: - contracts.is_verified = True - postgres_db.session.commit() - return {"message": "Contract verified successfully"}, 200 - else: - return {"message": f"Verified contract failed: {response.text}"}, 400 - - -@contract_namespace.route("/v1/explorer/verify_contract/solidity_versions") -class ExplorerSolidityCompilerVersion(Resource): - @cache.cached(timeout=3600, query_string=True) - def get(self): - response = get_solidity_version() - if response: - compiler_versions = response.get("compiler_versions") - return {"compiler_versions": compiler_versions}, 200 - else: - raise APIError("Failed to retrieve compiler versions", code=400) - - -@contract_namespace.route("/v1/explorer/verify_contract/compiler_types") -class ExplorerCompilerType(Resource): - def get(self): - compiler_types = [ - "Solidity (Single file)", - "Solidity (Multi-Part files)", - "Solidity (Standard-Json-Input)", - "Vyper (Experimental)", - ] - return {"compiler_types": compiler_types}, 200 - - -@contract_namespace.route("/v1/explorer/verify_contract/evm_versions") -class ExplorerEvmVersions(Resource): - @cache.cached(timeout=3600, query_string=True) - def get(self): - evm_versions = get_evm_versions() - if evm_versions: - return evm_versions, 200 - raise APIError("Failed to retrieve evm versions", code=400) - - -@contract_namespace.route("/v1/explorer/verify_contract/license_types") -class ExplorerLicenseType(Resource): - 
@cache.cached(timeout=3600, query_string=True) - def get(self): - license_type = get_explorer_license_type() - if license_type: - return license_type, 200 - raise APIError("Failed to retrieve license types", code=400) - - -@contract_namespace.route("/v1/explorer/verify_contract/vyper_versions") -class ExplorerVyperCompilerVersion(Resource): - @cache.cached(timeout=3600, query_string=True) - def get(self): - response = get_vyper_version() - if response: - compiler_versions = response.get("compiler_versions") - return {"compiler_versions": compiler_versions}, 200 - else: - raise APIError("Failed to retrieve compiler versions", code=400) - - -@contract_namespace.route("/v1/explorer/verify_contract/check") -class ExplorerVerifyContractBeforeCheck(Resource): - def post(self): - request_body = flask.request.json - address = request_body.get("address") - - if not address: - raise APIError("Missing required data", code=400) - address = address.lower() - # Check if address exists in ContractsInfo - contracts = postgres_db.session.query(Contracts).filter_by(address=hex_str_to_bytes(address)).first() - - if not contracts or not contracts.transaction_hash: - raise APIError("The address is not a contract", code=400) - - if contracts.is_verified: - return { - "message": "This contract already verified", - "already_verified": True, - }, 200 - - return { - "message": "This contract can be verified", - "already_verified": False, - }, 200 - - -@contract_namespace.route("/v1/explorer/verify_contract/verify_proxy") -class ExplorerVerifyContract(Resource): - def post(self): - request_body = flask.request.json - proxy_contract_address = request_body.get("proxy_contract_address") - if not proxy_contract_address: - raise APIError("Please sent correct proxy contract address") - - implementation_address = get_implementation_contract(proxy_contract_address) - print(implementation_address) - if not implementation_address: - return { - "implementation_address": None, - "message": "This 
contract does not look like it contains any delegatecall opcode sequence.", - } - exists = get_abi_by_chain_id_address(address=implementation_address) - - if not exists: - return { - "implementation_contract_address": implementation_address, - "message": f"The implementation contract at {implementation_address} does not seem to be verified.", - "is_verified": False, - } - - return { - "implementation_contract_address": implementation_address, - "message": f"The proxy's implementation contract is found at: {implementation_address}.", - "is_verified": True, - } - - -@contract_namespace.route("/v1/explorer/verify_contract/save_proxy") -class ExplorerVerifyContract(Resource): - def post(self): - request_body = flask.request.json - proxy_contract_address = request_body.get("proxy_contract_address") - implementation_contract_address = request_body.get("implementation_contract_address") - - if not proxy_contract_address or not implementation_contract_address: - raise APIError("Not such proxy contract address", code=400) - - contract = Contracts.query.filter(Contracts.address == proxy_contract_address.lower()).first() - contract.verified_implementation_contract = implementation_contract_address.lower() - - postgres_db.session.add(contract) - postgres_db.session.commit() - return as_dict(contract) - - -@contract_namespace.route("/v1/explorer/command_api/contract") -class ExplorerContractCommandApi(Resource): - def get(self): - module = flask.request.args.get("module") - action = flask.request.args.get("action") - guid = flask.request.args.get("guid") - address = flask.request.args.get("address") - return command_normal_contract_data(module, action, address, guid) - - @limiter.limit("10 per minute") - def post(self): - request_form = flask.request.form - action = request_form.get("action") - module = request_form.get("module") - if module != "contract": - return {"message": "The module is error", "status": "0"}, 200 - - if action != "verifysourcecode": - guid = 
request_form.get("guid") - address = request_form.get("address") - return command_normal_contract_data(module, action, address, guid) - - address = request_form.get("contractaddress") - address = address.lower() - compiler_type = request_form.get("codeformat") - compiler_version = request_form.get("compilerversion") - optimization_used = request_form.get("optimizationUsed") - if optimization_used == "1": - optimization = True - else: - optimization = False - optimization_runs = request_form.get("runs") - if not optimization_runs: - optimization_runs = 0 - input_str = request_form.get("sourceCode") - constructor_arguments = request_form.get("constructorArguments") - license_type = "None" - evm_version = "default" - - contracts = get_contract_by_address(address) - if contracts.is_verified: - return {"message": "This contract is verified", "status": "0"}, 200 - - creation_code, deployed_code = get_creation_or_deployed_code(contracts) - payload = { - "address": address, - "compiler_type": compiler_type, - "compiler_version": compiler_version, - "evm_version": evm_version, - "license_type": license_type, - "optimization": optimization, - "optimization_runs": optimization_runs, - "input_str": input_str, - "constructor_arguments": constructor_arguments, - "creation_code": creation_code, - "deployed_code": deployed_code, - } - - response = send_async_verification_request(payload) - - if response.status_code == 202: - # todo: use async way - contracts.is_verified = True - postgres_db.session.commit() - return { - "message": "Contract successfully verified", - # "message": "Contract is being verified", - "result": response.json()["guid"], - "status": "1", - }, 200 - else: - return { - "message": response.text, - "status": "0", - }, 200 - - -@contract_namespace.route("/v1/explorer/contract//code") -class ExplorerContractCode(Resource): - def get(self, contract_address): - contract_address = contract_address.lower() - - contract = 
Contracts.query.get(hex_str_to_bytes(contract_address)) - if not contract or contract.is_verified == False: - raise APIError("Contract not exist or contract is not verified.", code=400) - - contracts_verification = get_contract_code_by_address(address=contract_address) - if not contracts_verification: - raise APIError("Contract code not found!", code=400) - - # need front to change - files = [] - if "folder_path" in contracts_verification: - for file in contracts_verification["folder_path"]: - files.append( - { - "name": file.split("/")[-1], - "path": "https://contract-verify-files.s3.amazonaws.com/" + file, - } - ) - contracts_verification["files"] = files - return contracts_verification, 200 diff --git a/hemera/api/app/db_service/af_token_deposit.py b/hemera/api/app/db_service/af_token_deposit.py deleted file mode 100644 index 8ed1d2eb8..000000000 --- a/hemera/api/app/db_service/af_token_deposit.py +++ /dev/null @@ -1,75 +0,0 @@ -from hemera.common.models import db -from hemera.common.utils.db_utils import build_entities -from hemera.common.utils.format_utils import hex_str_to_bytes -from hemera_udf.deposit_to_l2.models.af_token_deposits__transactions import AFTokenDepositsTransactions -from hemera_udf.deposit_to_l2.models.af_token_deposits_current import AFTokenDepositsCurrent - - -def get_transactions_by_condition(filter_condition=None, columns="*", limit=None, offset=None): - entities = build_entities(AFTokenDepositsTransactions, columns) - - statement = db.session.query(AFTokenDepositsTransactions).with_entities(*entities) - - if filter_condition is not None: - statement = statement.filter(filter_condition) - - statement = statement.order_by(AFTokenDepositsTransactions.block_number.desc()) - - if limit is not None: - statement = statement.limit(limit) - - if offset is not None: - statement = statement.offset(offset) - - return statement.all() - - -def get_transactions_cnt_by_condition(filter_condition=None, columns="*"): - entities = 
build_entities(AFTokenDepositsTransactions, columns) - - count = db.session.query(AFTokenDepositsTransactions).with_entities(*entities).filter(filter_condition).count() - - return count - - -def get_transactions_cnt_by_wallet(wallet_address): - wallet_address = wallet_address.lower() - bytes_wallet_address = hex_str_to_bytes(wallet_address) - - count = get_transactions_cnt_by_condition( - filter_condition=AFTokenDepositsTransactions.wallet_address == bytes_wallet_address - ) - - return count - - -def get_deposit_chain_list(wallet_address): - wallet_address = wallet_address.lower() - bytes_wallet_address = hex_str_to_bytes(wallet_address) - - chain_list = ( - db.session.query(AFTokenDepositsTransactions.wallet_address, AFTokenDepositsTransactions.chain_id) - .filter(AFTokenDepositsTransactions.wallet_address == bytes_wallet_address) - .group_by(AFTokenDepositsTransactions.wallet_address, AFTokenDepositsTransactions.chain_id) - .all() - ) - - return chain_list - - -def get_deposit_assets_list(wallet_address): - entities = build_entities( - AFTokenDepositsCurrent, ["wallet_address", "chain_id", "contract_address", "token_address", "value"] - ) - - wallet_address = wallet_address.lower() - bytes_wallet_address = hex_str_to_bytes(wallet_address) - - assets_list = ( - db.session.query(AFTokenDepositsCurrent) - .with_entities(*entities) - .filter(AFTokenDepositsCurrent.wallet_address == bytes_wallet_address) - .all() - ) - - return assets_list diff --git a/hemera/api/app/db_service/blocks.py b/hemera/api/app/db_service/blocks.py deleted file mode 100644 index 48d09036f..000000000 --- a/hemera/api/app/db_service/blocks.py +++ /dev/null @@ -1,48 +0,0 @@ -from hemera.common.models import db -from hemera.common.models.blocks import Blocks -from hemera.common.utils.db_utils import build_entities -from hemera.common.utils.format_utils import hex_str_to_bytes - - -def get_last_block(columns="*"): - entities = build_entities(Blocks, columns) - - latest_block = 
db.session.query(Blocks).with_entities(*entities).order_by(Blocks.number.desc()).first() - - return latest_block - - -def get_block_by_number(block_number: int, columns="*"): - entities = build_entities(Blocks, columns) - - block = db.session.query(Blocks).with_entities(*entities).filter(Blocks.number == block_number).first() - - return block - - -def get_block_by_hash(hash: str, columns="*"): - bytes_hash = hex_str_to_bytes(hash) - entities = build_entities(Blocks, columns) - - results = db.session.query(Blocks).with_entities(*entities).filter(Blocks.hash == bytes_hash).first() - - return results - - -def get_blocks_by_condition(filter_condition=None, columns="*", limit=None, offset=None): - entities = build_entities(Blocks, columns) - - statement = db.session.query(Blocks).with_entities(*entities) - - if filter_condition is not None: - statement = statement.filter(filter_condition) - - statement = statement.order_by(Blocks.number.desc()) - - if limit is not None: - statement = statement.limit(limit) - - if offset is not None: - statement = statement.offset(offset) - - return statement.all() diff --git a/hemera/api/app/db_service/contract_internal_transactions.py b/hemera/api/app/db_service/contract_internal_transactions.py deleted file mode 100644 index 2bdb2c16c..000000000 --- a/hemera/api/app/db_service/contract_internal_transactions.py +++ /dev/null @@ -1,46 +0,0 @@ -from hemera.common.models import db -from hemera.common.models.contract_internal_transactions import ContractInternalTransactions -from hemera.common.utils.db_utils import build_entities -from hemera.common.utils.format_utils import hex_str_to_bytes - - -def get_internal_transactions_by_transaction_hash(transaction_hash, columns="*"): - transaction_hash = hex_str_to_bytes(transaction_hash.lower()) - entities = build_entities(ContractInternalTransactions, columns) - - transactions = ( - db.session.query(ContractInternalTransactions) - .with_entities(*entities) - .order_by( - 
ContractInternalTransactions.block_number.desc(), - ContractInternalTransactions.transaction_index.desc(), - ) - .filter(ContractInternalTransactions.transaction_hash == transaction_hash) - .all() - ) - return transactions - - -def get_internal_transactions_by_condition(columns="*", filter_condition=None, limit=1, offset=0): - entities = build_entities(ContractInternalTransactions, columns) - - transactions = ( - db.session.query(ContractInternalTransactions) - .with_entities(*entities) - .order_by( - ContractInternalTransactions.block_number.desc(), - ContractInternalTransactions.transaction_index.desc(), - ) - .filter(filter_condition) - .limit(limit) - .offset(offset) - .all() - ) - return transactions - - -def get_internal_transactions_cnt_by_condition(columns="*", filter_condition=None): - entities = build_entities(ContractInternalTransactions, columns) - count = db.session.query(ContractInternalTransactions).with_entities(*entities).filter(filter_condition).count() - - return count diff --git a/hemera/api/app/db_service/contracts.py b/hemera/api/app/db_service/contracts.py deleted file mode 100644 index 666efd669..000000000 --- a/hemera/api/app/db_service/contracts.py +++ /dev/null @@ -1,25 +0,0 @@ -from hemera.common.models import db -from hemera.common.models.contracts import Contracts -from hemera.common.utils.db_utils import build_entities -from hemera.common.utils.format_utils import hex_str_to_bytes - - -def get_contract_by_address(address: str, columns="*"): - bytes_address = hex_str_to_bytes(address) - entities = build_entities(Contracts, columns) - - contract = db.session.query(Contracts).with_entities(*entities).filter(Contracts.address == bytes_address).first() - - return contract - - -def get_contracts_by_addresses(address_list: list[bytes], columns="*"): - entities = build_entities(Contracts, columns) - contracts = ( - db.session.query(Contracts) - .with_entities(*entities) - .filter(Contracts.address.in_(list(set(address_list)))) - .all() - ) - 
- return contracts diff --git a/hemera/api/app/db_service/daily_transactions_aggregates.py b/hemera/api/app/db_service/daily_transactions_aggregates.py deleted file mode 100644 index ff2540d74..000000000 --- a/hemera/api/app/db_service/daily_transactions_aggregates.py +++ /dev/null @@ -1,16 +0,0 @@ -from hemera.common.models import db -from hemera.common.utils.db_utils import build_entities -from hemera_udf.stats.models.daily_transactions_stats import DailyTransactionsStats - - -def get_daily_transactions_cnt(columns="*", limit=10): - entities = build_entities(DailyTransactionsStats, columns) - - results = ( - db.session.query(DailyTransactionsStats) - .with_entities(*entities) - .order_by(DailyTransactionsStats.block_date.desc()) - .limit(limit) - ) - - return results diff --git a/hemera/api/app/db_service/logs.py b/hemera/api/app/db_service/logs.py deleted file mode 100644 index 368d525ba..000000000 --- a/hemera/api/app/db_service/logs.py +++ /dev/null @@ -1,40 +0,0 @@ -from hemera.common.models import db -from hemera.common.models.logs import Logs -from hemera.common.models.transactions import Transactions -from hemera.common.utils.format_utils import hex_str_to_bytes - - -def get_logs_with_input_by_hash(hash, columns="*"): - bytes_hash = hex_str_to_bytes(hash.lower()) - # Always get FUll Logs now - - logs = ( - db.session.query(Logs) - .filter(Logs.transaction_hash == bytes_hash, Logs.block_timestamp == Transactions.block_timestamp) - .join(Transactions, Logs.transaction_hash == Transactions.hash) - .add_columns(Transactions.input) - .all() - ) - - return logs - - -def get_logs_with_input_by_address(address: str, limit=None, offset=None): - address_bytes = hex_str_to_bytes(address.lower()) - - statement = ( - db.session.query(Logs) - .filter(Logs.address == address_bytes) - .join(Transactions, Logs.transaction_hash == Transactions.hash) - .add_columns(Transactions.input) - ) - - if limit is not None: - statement = statement.limit(limit) - - if offset is not 
None: - statement = statement.offset(offset) - - logs = statement.all() - - return logs diff --git a/hemera/api/app/db_service/tokens.py b/hemera/api/app/db_service/tokens.py deleted file mode 100644 index 9cb3974b8..000000000 --- a/hemera/api/app/db_service/tokens.py +++ /dev/null @@ -1,318 +0,0 @@ -from sqlalchemy import and_, func, select - -from hemera.api.app.db_service.wallet_addresses import get_token_txn_cnt_by_address -from hemera.api.app.utils.fill_info import fill_address_display_to_transactions, fill_is_contract_to_transactions -from hemera.common.models import db -from hemera.common.models.erc20_token_transfers import ERC20TokenTransfers -from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers -from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers -from hemera.common.models.scheduled_metadata import ScheduledMetadata -from hemera.common.models.token_prices import TokenPrices -from hemera.common.models.tokens import Tokens -from hemera.common.utils.config import get_config -from hemera.common.utils.db_utils import build_entities, get_total_row_count -from hemera.common.utils.exception_control import APIError -from hemera.common.utils.format_utils import as_dict, hex_str_to_bytes - -app_config = get_config() - -token_transfer_type_table_dict = { - "tokentxns": ERC20TokenTransfers, - "tokentxns-nft": ERC721TokenTransfers, - "tokentxns-nft1155": ERC1155TokenTransfers, - "erc20": ERC20TokenTransfers, - "erc721": ERC721TokenTransfers, - "erc1155": ERC1155TokenTransfers, - "ERC20": ERC20TokenTransfers, - "ERC721": ERC721TokenTransfers, - "ERC1155": ERC1155TokenTransfers, -} - - -def type_to_token_transfer_table(type): - return token_transfer_type_table_dict[type] - - -def get_address_token_transfer_cnt(token_type, condition, address): - # Get count last update timestamp - last_timestamp = db.session.query(func.max(ScheduledMetadata.last_data_timestamp)).scalar() - - # Get historical count - result = 
get_token_txn_cnt_by_address(token_type, address) - - new_transfer_count = ( - db.session.query(type_to_token_transfer_table(token_type)) - .filter( - and_( - ( - type_to_token_transfer_table(token_type).block_timestamp >= last_timestamp.date() - if last_timestamp is not None - else True - ), - condition, - ) - ) - .count() - ) - return new_transfer_count + (result[0] if result and result[0] else 0) - - -def get_token_address_token_transfer_cnt(token_type: str, address: str): - # Get count last update timestamp - bytes_address = hex_str_to_bytes(address) - last_timestamp = db.session.query(func.max(ScheduledMetadata.last_data_timestamp)).scalar() - - # Get historical count - result = ( - db.session.query(Tokens).with_entities(Tokens.transfer_count).filter(Tokens.address == bytes_address).first() - ) - if result and result[0]: - return result[0] - return ( - db.session.query(type_to_token_transfer_table(token_type)) - .filter( - and_( - ( - type_to_token_transfer_table(token_type).block_timestamp >= last_timestamp - if last_timestamp is not None - else True - ), - type_to_token_transfer_table(token_type).token_address == bytes_address, - ) - ) - .count() - ) + (result[0] if result and result[0] else 0) - - -def get_raw_token_transfers(type, condition, page_index, page_size, is_count=True): - if type not in token_transfer_type_table_dict: - raise APIError("Invalid type", code=400) - - token_trasfer_table = token_transfer_type_table_dict[type] - - if type in ["tokentxns", "erc20", "ERC20", "tokentxns-nft", "erc721", "ERC721"]: - token_transfers = ( - db.session.execute( - db.select(token_trasfer_table) - .where(condition) - .order_by( - token_trasfer_table.block_number.desc(), - token_trasfer_table.block_timestamp.desc(), - token_trasfer_table.log_index.desc(), - ) - .limit(page_size) - .offset((page_index - 1) * page_size) - ) - .scalars() - .all() - ) - elif type in ["tokentxns-nft1155", "erc1155", "ERC1155"]: - token_transfers = ( - 
db.session.query(token_trasfer_table) - .filter(condition) - .order_by( - token_trasfer_table.block_number.desc(), - token_trasfer_table.log_index.desc(), - token_trasfer_table.token_id.desc(), - ) - .limit(page_size) - .offset((page_index - 1) * page_size) - .all() - ) - else: - ## - token_transfers = [] - - if is_count: - if (len(token_transfers) > 0 or page_index == 1) and len(token_transfers) < page_size: - total_count = (page_index - 1) * page_size + len(token_transfers) - elif condition: - total_count = get_total_row_count(token_trasfer_table.__tablename__) - else: - total_count = db.session.query(token_trasfer_table).filter(condition).count() - else: - total_count = 0 - - return token_transfers, total_count - - -def parse_token_transfers(token_transfers, type=None): - bytea_address_list = [] - bytea_token_address_list = [] - for token_transfer in token_transfers: - bytea_token_address_list.append(token_transfer.token_address) - bytea_address_list.append(token_transfer.from_address) - bytea_address_list.append(token_transfer.to_address) - bytea_token_address_list = list(set(bytea_token_address_list)) - bytea_address_list = list(set(bytea_address_list)) - - # Find token - if type in ["tokentxns", "erc20", "ERC20"]: - tokens = ( - db.session.query(Tokens) - .filter(and_(Tokens.address.in_(bytea_token_address_list), Tokens.token_type == "ERC20")) - .all() - ) - elif type in ["tokentxns-nft", "erc721", "ERC721"]: - tokens = ( - db.session.query(Tokens) - .filter(and_(Tokens.address.in_(bytea_token_address_list), Tokens.token_type == "ERC721")) - .all() - ) - elif type in ["tokentxns-nft1155", "erc1155", "ERC1155"]: - tokens = ( - db.session.query(Tokens) - .filter(and_(Tokens.address.in_(bytea_token_address_list), Tokens.token_type == "ERC1155")) - .all() - ) - else: - tokens = db.session.query(Tokens).filter(Tokens.address.in_(bytea_token_address_list)).all() - token_map = {} # bytea -> token - for token in tokens: - token_map[token.address] = token - - 
token_transfer_list = [] - for token_transfer in token_transfers: - token_transfer_json = as_dict(token_transfer) - token = token_map.get(token_transfer.token_address) - - if type in ["tokentxns", "erc20", "ERC20"]: - decimals = 18 - if token: - decimals = token.decimals - token_transfer_json["value"] = ( - "{0:.15f}".format(token_transfer.value / 10**decimals).rstrip("0").rstrip(".") - ) - elif type in ["tokentxns-nft", "erc721", "ERC721"]: - token_transfer_json["token_id"] = "{:f}".format(token_transfer.token_id) - elif type in ["tokentxns-nft1155", "erc1155", "ERC1155"]: - token_transfer_json["value"] = "{:f}".format(token_transfer.value) - token_transfer_json["token_id"] = "{:f}".format(token_transfer.token_id) - - if token: - token_transfer_json["token_symbol"] = token.symbol or "UNKNOWN" - token_transfer_json["token_name"] = token.name or "Unknown Token" - token_transfer_json["token_logo_url"] = token.icon_url - else: - token_transfer_json["token_symbol"] = "UNKNOWN" - token_transfer_json["token_name"] = "Unknown Token" - token_transfer_json["token_logo_url"] = None - - token_transfer_list.append(token_transfer_json) - - fill_is_contract_to_transactions(token_transfer_list, bytea_address_list) - fill_address_display_to_transactions(token_transfer_list, bytea_address_list) - - return token_transfer_list - - -def get_token_by_address(address: str, columns="*"): - bytes_address = hex_str_to_bytes(address) - entities = build_entities(Tokens, columns) - - tokens = db.session.query(Tokens).with_entities(*entities).filter(Tokens.address == bytes_address).first() - - return tokens - - -def get_tokens_cnt_by_condition(columns="*", filter_condition=None): - entities = build_entities(Tokens, columns) - - statement = db.session.query(Tokens).with_entities(*entities) - - if filter_condition is not None: - statement = statement.filter(filter_condition) - - count = statement.count() - - return count - - -def get_tokens_by_condition(columns="*", filter_condition=None, 
order=None, limit=1, offset=0): - entities = build_entities(Tokens, columns) - - statement = db.session.query(Tokens).with_entities(*entities) - - if filter_condition is not None: - statement = statement.filter(filter_condition) - - if order is not None: - statement = statement.order_by(order) - - tokens = statement.limit(limit).offset(offset).all() - - return tokens - - -def get_token_transfers_with_token_by_hash(hash, model, transfer_columns="*", token_columns="*"): - hash = hex_str_to_bytes(hash.lower()) - - transfer_entities = build_entities(model, transfer_columns) - token_entities = build_entities(Tokens, token_columns) - - token_transfers = ( - db.session.query(model) - .with_entities(*transfer_entities) - .filter(model.transaction_hash == hash) - .join( - Tokens, - model.token_address == Tokens.address, - ) - .add_columns(*token_entities) - .all() - ) - - return token_transfers - - -def get_token_holders(token_address: str, model, columns="*", limit=None, offset=None): - bytes_token_address = hex_str_to_bytes(token_address) - entities = build_entities(model, columns) - - statement = ( - db.session.query(model) - .with_entities(*entities) - .filter( - model.token_address == bytes_token_address, - model.balance > 0, - ) - .order_by(model.balance.desc()) - ) - - if limit is not None: - statement = statement.limit(limit) - - if offset is not None: - statement = statement.offset(offset) - - top_holders = statement.all() - - return top_holders - - -def get_token_holders_cnt(token_address: str, model, columns="*"): - bytes_token_address = hex_str_to_bytes(token_address) - entities = build_entities(model, columns) - - holders_count = ( - db.session.query(model) - .with_entities(*entities) - .filter( - model.token_address == bytes_token_address, - model.balance > 0, - ) - .count() - ) - - return holders_count - - -def get_token_price_map_by_symbol_list(token_symbol_list): - token_price_map = {} - for symbol in token_symbol_list: - token_price = db.session.execute( - 
select(TokenPrices).where(TokenPrices.symbol == symbol).order_by(TokenPrices.timestamp.desc()).limit(1) - ).scalar() - if token_price: - token_price_map[symbol] = token_price.price - return token_price_map diff --git a/hemera/api/app/db_service/traces.py b/hemera/api/app/db_service/traces.py deleted file mode 100644 index 421ff9cbf..000000000 --- a/hemera/api/app/db_service/traces.py +++ /dev/null @@ -1,27 +0,0 @@ -from hemera.common.models import db -from hemera.common.models.traces import Traces -from hemera.common.utils.db_utils import build_entities -from hemera.common.utils.format_utils import hex_str_to_bytes - - -def get_traces_by_transaction_hash(transaction_hash, columns="*"): - transaction_hash = hex_str_to_bytes(transaction_hash) - entities = build_entities(Traces, columns) - - traces = ( - db.session.query(Traces) - .with_entities(*entities) - .filter(Traces.transaction_hash == transaction_hash) - .order_by(Traces.trace_address) - .all() - ) - - return traces - - -def get_traces_by_condition(filter_condition=None, columns="*", limit=1): - entities = build_entities(Traces, columns) - - traces = db.session.query(Traces).with_entities(*entities).filter(filter_condition).limit(limit).all() - - return traces diff --git a/hemera/api/app/db_service/transactions.py b/hemera/api/app/db_service/transactions.py deleted file mode 100644 index 72b6312d3..000000000 --- a/hemera/api/app/db_service/transactions.py +++ /dev/null @@ -1,174 +0,0 @@ -from datetime import datetime, timedelta - -from sqlalchemy import and_, func, or_ - -from hemera.api.app.cache import cache -from hemera.api.app.db_service.wallet_addresses import get_txn_cnt_by_address -from hemera.common.models import db -from hemera.common.models.scheduled_metadata import ScheduledMetadata -from hemera.common.models.transactions import Transactions -from hemera.common.utils.db_utils import build_entities -from hemera.common.utils.format_utils import hex_str_to_bytes -from 
hemera_udf.address_index.models.address_transactions import AddressTransactions -from hemera_udf.stats.models.daily_transactions_stats import DailyTransactionsStats - -MAX_ADDRESS_TXN_COUNT = 100000 - - -def get_last_transaction(): - last_transaction = ( - db.session.query(Transactions) - .with_entities(Transactions.block_timestamp) - .order_by(Transactions.block_number.desc()) - .first() - ) - return last_transaction - - -def get_transaction_by_hash(hash: str, columns="*"): - bytes_hash = hex_str_to_bytes(hash) - entities = build_entities(Transactions, columns) - - results = db.session.query(Transactions).with_entities(*entities).filter(Transactions.hash == bytes_hash).first() - - return results - - -def get_transactions_by_from_address(address, columns="*"): - bytes_address = hex_str_to_bytes(address) - entities = build_entities(Transactions, columns) - - results = ( - db.session.query(Transactions) - .with_entities(*entities) - .filter(Transactions.from_address == bytes_address) - .first() - ) - - return results - - -def get_transactions_by_to_address(address, columns="*", limit=1): - bytes_address = hex_str_to_bytes(address) - entities = build_entities(Transactions, columns) - - results = ( - db.session.query(Transactions).with_entities(*entities).filter(Transactions.to_address == bytes_address).first() - ) - - return results - - -@cache.memoize(60) -def get_tps_latest_10min(timestamp): - cnt = Transactions.query.filter(Transactions.block_timestamp >= (timestamp - timedelta(minutes=10))).count() - return float(cnt / 600) - - -def get_address_transaction_cnt_v2(address: str): - last_timestamp = db.session.query(func.max(ScheduledMetadata.last_data_timestamp)).scalar() - bytes_address = hex_str_to_bytes(address) - - result = get_txn_cnt_by_address(address) - past_txn_count = 0 if not result else result[0] - - if past_txn_count > MAX_ADDRESS_TXN_COUNT: - return past_txn_count - - recently_txn_count = ( - db.session.query(AddressTransactions.address) - .filter( - 
and_( - (AddressTransactions.block_timestamp >= last_timestamp if last_timestamp is not None else True), - AddressTransactions.address == bytes_address, - ) - ) - .count() - ) - - total_count = past_txn_count + recently_txn_count - return total_count - - -def get_address_transaction_cnt(address: str): - last_timestamp = db.session.query(func.max(ScheduledMetadata.last_data_timestamp)).scalar() - bytes_address = hex_str_to_bytes(address) - - result = get_txn_cnt_by_address(address) - past_txn_count = 0 if not result else result[0] - if past_txn_count > MAX_ADDRESS_TXN_COUNT: - return past_txn_count - - recently_txn_count = ( - db.session.query(Transactions.hash) - .filter( - and_( - (Transactions.block_timestamp >= last_timestamp if last_timestamp is not None else True), - or_( - Transactions.from_address == bytes_address, - Transactions.to_address == bytes_address, - ), - ) - ) - .count() - ) - total_count = past_txn_count + recently_txn_count - return total_count - - -def get_total_txn_count(): - # Get the latest block date and cumulative count - latest_record = ( - DailyTransactionsStats.query.with_entities( - DailyTransactionsStats.block_date, - DailyTransactionsStats.total_cnt, - ) - .order_by(DailyTransactionsStats.block_date.desc()) - .first() - ) - - # Check if the query returned a result - if latest_record is None: - return Transactions.query.count() - - block_date, cumulate_count = latest_record - - current_time = datetime.utcnow() - - ten_minutes_ago = current_time - timedelta(minutes=10) - latest_10_min_txn_cnt = Transactions.query.filter(Transactions.block_timestamp >= ten_minutes_ago).count() - - avg_txn_per_minute = latest_10_min_txn_cnt / 10 - - minutes_since_last_block = int((current_time - block_date).total_seconds() / 60) - - estimated_txn = int(avg_txn_per_minute * minutes_since_last_block) - - return estimated_txn + cumulate_count - - -def get_transactions_by_condition(filter_condition=None, columns="*", limit=1, offset=0): - entities = 
build_entities(Transactions, columns) - - transactions = ( - db.session.query(Transactions) - .with_entities(*entities) - .order_by( - Transactions.block_number.desc(), - Transactions.transaction_index.desc(), - ) - .filter(filter_condition) - .limit(limit) - .offset(offset) - .all() - ) - - return transactions - - -def get_transactions_cnt_by_condition(filter_condition=None, columns="*"): - entities = build_entities(Transactions, columns) - - count = db.session.query(Transactions).with_entities(*entities).filter(filter_condition).count() - - return count diff --git a/hemera/api/app/db_service/wallet_addresses.py b/hemera/api/app/db_service/wallet_addresses.py deleted file mode 100644 index be858f56e..000000000 --- a/hemera/api/app/db_service/wallet_addresses.py +++ /dev/null @@ -1,134 +0,0 @@ -from hemera.api.app.cache import cache -from hemera.api.app.contract.contract_verify import get_contract_names -from hemera.api.app.ens.ens import ENSClient -from hemera.common.models import db -from hemera.common.models.contracts import Contracts -from hemera.common.models.tokens import Tokens -from hemera.common.utils.config import get_config -from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes -from hemera_udf.address_index.models.address_index_stats import AddressIndexStats - -app_config = get_config() - -if app_config.ens_service is not None and app_config.ens_service != "": - ens_client = ENSClient(app_config.ens_service) -else: - ens_client = None - -token_address_transfers_type_column_dict = { - "tokentxns": AddressIndexStats.erc20_transfer_count, - "tokentxns-nft": AddressIndexStats.nft_721_transfer_count, - "tokentxns-nft1155": AddressIndexStats.nft_1155_transfer_count, - "erc20": AddressIndexStats.erc20_transfer_count, - "erc721": AddressIndexStats.nft_721_transfer_count, - "erc1155": AddressIndexStats.nft_1155_transfer_count, -} - - -def type_to_stats_column(type): - return token_address_transfers_type_column_dict[type] - - -def 
get_token_txn_cnt_by_address(token_type, bytes_address: bytes): - result = ( - db.session.query(AddressIndexStats) - .with_entities(type_to_stats_column(token_type)) - .filter(AddressIndexStats.address == bytes_address) - .first() - ) - - return result - - -def get_txn_cnt_by_address(address: str): - bytes_address = hex_str_to_bytes(address) - result = ( - db.session.query(AddressIndexStats) - .with_entities(AddressIndexStats.transaction_count) - .filter(AddressIndexStats.address == bytes_address) - .first() - ) - return result - - -@cache.memoize(3600) -def get_address_display_mapping(bytea_address_list: list[bytes]): - if not bytea_address_list or len(bytea_address_list) == 0: - return {} - - # filter not valid address - bytea_address_list = [address for address in bytea_address_list if address] - str_address_list = [bytes_to_hex_str(address) for address in bytea_address_list] - - # str -> str - address_map = {} - - # Contract + Proxy Contract - proxy_mapping_result = ( - db.session.query(Contracts.address, Contracts.verified_implementation_contract) - .filter( - Contracts.address.in_(bytea_address_list), - Contracts.verified_implementation_contract != None, - ) - .all() - ) - # bytea -> bytea - proxy_mapping = {} - for address in proxy_mapping_result: - proxy_mapping[address.address] = address.verified_implementation_contract - - # Get name for all the potential contracts, including proxy implementations - str_contract_list = str_address_list + [bytes_to_hex_str(address) for address in proxy_mapping.values()] - contract_addresses = get_contract_names(str_contract_list) - - # update address to contract name mapping - address_map.update({address.get("address"): address.get("contract_name") for address in contract_addresses}) - - # If an implementation address has name, overwrite the proxy contract - for proxy_address, implementation_address in proxy_mapping.items(): - str_proxy_address = bytes_to_hex_str(proxy_address) - str_implementation_address = 
bytes_to_hex_str(implementation_address) - if str_implementation_address in address_map: - address_map[str_proxy_address] = address_map[str_implementation_address] - - # Token - addresses = ( - db.session.query(Tokens.address, Tokens.name, Tokens.symbol) - .filter( - Tokens.address.in_(bytea_address_list), - ) - .all() - ) - for address in addresses: - str_address = bytes_to_hex_str(address.address) - address_map[str_address] = "{}: {} Token".format(address.name, address.symbol) - - # ENS - if ens_client: - addresses = ens_client.batch_get_address_ens(str_address_list) - for key, value in addresses.items(): - address_map[key] = value - - # Any additional manual tags - addresses = ( - db.session.query(AddressIndexStats.address, AddressIndexStats.tag) - .filter( - AddressIndexStats.address.in_(bytea_address_list), - AddressIndexStats.tag != None, - ) - .all() - ) - - for address in addresses: - str_address = bytes_to_hex_str(address.address) - address_map[str_address] = address.tag - - return address_map - - -@cache.memoize(3600) -def get_ens_mapping(wallet_address_list): - if ens_client: - address_map = ens_client.batch_get_address_ens(wallet_address_list) - - return address_map diff --git a/hemera/api/app/explorer/__init__.py b/hemera/api/app/explorer/__init__.py deleted file mode 100644 index 15b0f8a58..000000000 --- a/hemera/api/app/explorer/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -from flask_restx.namespace import Namespace - -explorer_namespace = Namespace("Blockchain Explorer", path="/", description="Blockchain Explorer API") diff --git a/hemera/api/app/explorer/routes.py b/hemera/api/app/explorer/routes.py deleted file mode 100644 index b5b1e869c..000000000 --- a/hemera/api/app/explorer/routes.py +++ /dev/null @@ -1,2380 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -import csv -import io -import json -import logging -import string -from datetime import date, datetime, time, timedelta -from decimal 
import Decimal - -import flask -from flask import Response -from flask_restx import Resource, reqparse -from sqlalchemy.sql import and_, func, nullslast, or_ -from sqlalchemy.sql.sqltypes import Numeric - -from hemera.api.app.cache import cache -from hemera.api.app.contract.contract_verify import get_abis_for_method, get_sha256_hash, get_similar_addresses -from hemera.api.app.db_service.blocks import ( - get_block_by_hash, - get_block_by_number, - get_blocks_by_condition, - get_last_block, -) -from hemera.api.app.db_service.contract_internal_transactions import ( - get_internal_transactions_by_condition, - get_internal_transactions_by_transaction_hash, - get_internal_transactions_cnt_by_condition, -) -from hemera.api.app.db_service.contracts import get_contract_by_address -from hemera.api.app.db_service.daily_transactions_aggregates import get_daily_transactions_cnt -from hemera.api.app.db_service.logs import get_logs_with_input_by_address, get_logs_with_input_by_hash -from hemera.api.app.db_service.tokens import ( - get_address_token_transfer_cnt, - get_raw_token_transfers, - get_token_address_token_transfer_cnt, - get_token_by_address, - get_token_holders, - get_token_holders_cnt, - get_token_transfers_with_token_by_hash, - get_tokens_by_condition, - get_tokens_cnt_by_condition, - parse_token_transfers, - type_to_token_transfer_table, -) -from hemera.api.app.db_service.traces import get_traces_by_condition, get_traces_by_transaction_hash -from hemera.api.app.db_service.transactions import ( - get_address_transaction_cnt, - get_address_transaction_cnt_v2, - get_total_txn_count, - get_tps_latest_10min, - get_transaction_by_hash, - get_transactions_by_condition, - get_transactions_by_from_address, - get_transactions_by_to_address, - get_transactions_cnt_by_condition, -) -from hemera.api.app.db_service.wallet_addresses import get_address_display_mapping, get_ens_mapping -from hemera.api.app.explorer import explorer_namespace -from hemera.api.app.utils.fill_info 
import ( - fill_address_display_to_transactions, - fill_is_contract_to_transactions, - process_token_transfer, -) -from hemera.api.app.utils.format_utils import format_coin_value_with_unit, format_dollar_value -from hemera.api.app.utils.parse_utils import parse_log_with_transaction_input_list, parse_transactions -from hemera.api.app.utils.token_utils import get_token_price -from hemera.api.app.utils.web3_utils import get_balance, get_code, get_gas_price -from hemera.common.models import db -from hemera.common.models.blocks import Blocks -from hemera.common.models.contract_internal_transactions import ContractInternalTransactions -from hemera.common.models.contracts import Contracts -from hemera.common.models.current_token_balances import CurrentTokenBalances -from hemera.common.models.erc20_token_transfers import ERC20TokenTransfers -from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers -from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers -from hemera.common.models.token_balances import AddressTokenBalances -from hemera.common.models.tokens import Tokens -from hemera.common.models.traces import Traces -from hemera.common.models.transactions import Transactions -from hemera.common.utils.abi_code_utils import Function, decode_function, decode_log_data -from hemera.common.utils.config import get_config -from hemera.common.utils.db_utils import get_total_row_count -from hemera.common.utils.exception_control import APIError -from hemera.common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, hex_str_to_bytes, row_to_dict -from hemera.common.utils.web3_utils import ( - get_debug_trace_transaction, - is_eth_address, - is_eth_transaction_hash, - to_checksum_address, -) -from hemera_udf.address_index.models.address_index_stats import AddressIndexStats -from hemera_udf.address_index.utils.helpers import ( - get_address_erc20_token_transfer_cnt, - get_address_token_transfers, - get_address_transactions, - 
parse_address_token_transfers, - parse_address_transactions, -) -from hemera_udf.stats.models.daily_addresses_stats import DailyAddressesStats -from hemera_udf.stats.models.daily_blocks_stats import DailyBlocksStats -from hemera_udf.stats.models.daily_tokens_stats import DailyTokensStats -from hemera_udf.stats.models.daily_transactions_stats import DailyTransactionsStats - -PAGE_SIZE = 25 -MAX_TRANSACTION = 500000 -MAX_TRANSACTION_WITH_CONDITION = 10000 -MAX_INTERNAL_TRANSACTION = 10000 -MAX_TOKEN_TRANSFER = 10000 - -TRANSACTION_LIST_COLUMNS = [ - "hash", - "from_address", - "to_address", - "value", - "input", - "method_id", - "block_number", - "block_timestamp", - "gas_price", - "receipt_gas_used", - "receipt_l1_fee", - "receipt_l1_gas_used", - "receipt_l1_gas_price", - "receipt_contract_address", -] - -app_config = get_config() - - -@explorer_namespace.route("/v1/explorer/health") -class ExplorerHealthCheck(Resource): - def get(self): - block = get_last_block(columns=["number", "timestamp"]) - return { - "latest_block_number": block.number, - "latest_block_timestamp": block.timestamp.isoformat(), - "engine_pool_status": db.engine.pool.status(), - "status": "OK", - }, 200 - - -@explorer_namespace.route("/v1/explorer/stats") -class ExplorerMainStats(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self): - # Get total transactions count. 
- # This can be slow without daily aggregation job ~300ms - transaction_count = get_total_txn_count() - - # Get latest block - latest_block = get_last_block(columns=["number", "timestamp"]) - latest_block_number = latest_block.number - - # Get 5000 block earlier to calculate avg block time - # If there is no enough block, use the first one - earlier_block_number = max(latest_block_number - 5000, 1) - earlier_block = get_block_by_number(block_number=earlier_block_number, columns=["number", "timestamp"]) - if earlier_block is None: - earlier_block = latest_block - - # Handle 0 - avg_block_time = (latest_block.timestamp.timestamp() - earlier_block.timestamp.timestamp()) / ( - (latest_block_number - earlier_block_number) or 1 - ) - - # Get transaction tps - transaction_tps = get_tps_latest_10min(latest_block.timestamp) - - # TODO add batch for op/arb - latest_batch_number = 0 - - BTC_PRICE = get_token_price("WBTC") - ETH_PRICE = get_token_price("ETH") - ETH_PRICE_PRIVIOUS = get_token_price( - "ETH", - datetime.combine(datetime.now() - timedelta(days=1), time.min), - ) - - if app_config.token_configuration.native_token == "ETH": - NATIVE_TOKEN_PRICE = ETH_PRICE - NATIVE_TOKEN_PRICE_PRIVIOUS = ETH_PRICE_PRIVIOUS - else: - NATIVE_TOKEN_PRICE = get_token_price(app_config.token_configuration.native_token) - NATIVE_TOKEN_PRICE_PRIVIOUS = get_token_price( - app_config.token_configuration.native_token, - datetime.combine(datetime.now() - timedelta(days=1), time.min), - ) - - if app_config.token_configuration.dashboard_token == app_config.token_configuration.native_token: - DASHBOARD_TOKEN_PRICE = NATIVE_TOKEN_PRICE - DASHBOARD_TOKEN_PRICE_PRIVIOUS = NATIVE_TOKEN_PRICE_PRIVIOUS - else: - DASHBOARD_TOKEN_PRICE = get_token_price(app_config.token_configuration.dashboard_token) - DASHBOARD_TOKEN_PRICE_PRIVIOUS = get_token_price( - app_config.token_configuration.dashboard_token, - datetime.combine(datetime.now() - timedelta(days=1), time.min), - ) - - return { - 
"total_transactions": transaction_count, - "transaction_tps": round(transaction_tps, 2), - "latest_batch": latest_batch_number, - "latest_block": latest_block_number, - "avg_block_time": avg_block_time, - "eth_price": format_dollar_value(ETH_PRICE), - "eth_price_btc": "{0:.5f}".format(ETH_PRICE / (BTC_PRICE or 1)), - "eth_price_diff": "{0:.4f}".format((ETH_PRICE - ETH_PRICE_PRIVIOUS) / (ETH_PRICE_PRIVIOUS or 1)), - "native_token_price": format_dollar_value(NATIVE_TOKEN_PRICE), - "native_token_price_eth": "{0:.5f}".format(NATIVE_TOKEN_PRICE / (ETH_PRICE or 1)), - "native_token_price_diff": ( - "{0:.4f}".format( - (NATIVE_TOKEN_PRICE - NATIVE_TOKEN_PRICE_PRIVIOUS) / (NATIVE_TOKEN_PRICE_PRIVIOUS or 1) - ) - if NATIVE_TOKEN_PRICE_PRIVIOUS != 0 - else 0 - ), - "dashboard_token_price_eth": "{0:.5f}".format(DASHBOARD_TOKEN_PRICE / (ETH_PRICE or 1)), - "dashboard_token_price": format_dollar_value(DASHBOARD_TOKEN_PRICE), - "dashboard_token_price_diff": ( - "{0:.4f}".format( - (DASHBOARD_TOKEN_PRICE - DASHBOARD_TOKEN_PRICE_PRIVIOUS) / (DASHBOARD_TOKEN_PRICE_PRIVIOUS or 1) - ) - if DASHBOARD_TOKEN_PRICE_PRIVIOUS != 0 - else 0 - ), - "gas_fee": "{0:1f}".format(get_gas_price() / 10**9).rstrip("0").rstrip(".") + " Gwei", - }, 200 - - -@explorer_namespace.route("/v1/explorer/charts/transactions_per_day") -class ExplorerChartsTransactionsPerDay(Resource): - @cache.cached(timeout=300, query_string=True) - def get(self): - results = get_daily_transactions_cnt(columns=[("block_date", "date"), "cnt"], limit=14) - - date_list = [] - for item in results: - date_list.append({"value": item.date.isoformat(), "count": item.cnt}) - - return { - "title": "Daily Transactions Chart", - "data": date_list, - }, 200 - - -@explorer_namespace.route("/v1/explorer/search") -class ExplorerSearch(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self): - query_string = flask.request.args.get("q") - if not query_string: - raise APIError("Missing query string", code=400) - query_string 
= query_string.lower() - - search_result = [] - if query_string.isdigit(): - block = get_block_by_number(block_number=int(query_string)) - if block is not None: - search_result.append( - { - "block_hash": bytes_to_hex_str(block.hash), - "block_number": block.number, - "type": "block", - } - ) - - # Top priority, search wallet_address - if is_eth_address(query_string): - contract = get_contract_by_address(address=query_string) - if contract: - search_result.append( - { - "wallet_address": bytes_to_hex_str(contract.address), - "type": "address", - } - ) - return search_result - else: - wallet = get_transactions_by_from_address(address=query_string, columns=[("from_address", "address")]) - if wallet: - search_result.append( - { - "wallet_address": bytes_to_hex_str(wallet.address), - "type": "address", - } - ) - return search_result - else: - wallet = get_transactions_by_to_address(address=query_string, columns=[("to_address", "address")]) - if wallet: - search_result.append( - { - "wallet_address": bytes_to_hex_str(wallet.address), - "type": "address", - } - ) - return search_result - - # Check transaction hash - if is_eth_transaction_hash(query_string): - transaction = get_transaction_by_hash(hash=query_string, columns=["hash"]) - if transaction: - search_result.append( - { - "transaction_hash": bytes_to_hex_str(transaction.hash), - "type": "transaction", - } - ) - return search_result - else: - block = get_block_by_hash(hash=query_string, columns=["hash", "number"]) - if block: - search_result.append( - { - "block_hash": bytes_to_hex_str(block.hash), - "block_number": block.number, - "type": "block", - } - ) - return search_result - - # search token - if len(query_string) > 1: - # Update, we consolidate all tokens into one single table - filter_condition = and_( - or_( - Tokens.name.ilike(f"%{query_string}%"), - Tokens.symbol.ilike(f"%{query_string}%"), - ), - ) - tokens = get_tokens_by_condition( - columns=["name", "symbol", "address", "icon_url"], 
filter_condition=filter_condition, limit=5 - ) - - for token in tokens: - search_result.append( - { - "token_name": token.name, - "token_symbol": token.symbol, - "token_address": bytes_to_hex_str(token.address), - "token_logo_url": token.icon_url, - "type": "token", - } - ) - - return search_result, 200 - - -@explorer_namespace.route("/v1/explorer/internal_transactions") -class ExplorerInternalTransactions(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self): - page_index = int(flask.request.args.get("page", 1)) - page_size = int(flask.request.args.get("size", PAGE_SIZE)) - if page_index <= 0 or page_size <= 0: - raise APIError("Invalid page or size", code=400) - - address = flask.request.args.get("address") - block = flask.request.args.get("block", None) - - if page_index * page_size > MAX_INTERNAL_TRANSACTION: - raise APIError( - f"Showing the last {MAX_INTERNAL_TRANSACTION} records only", - code=400, - ) - - filter_condition = True - if address: - address = hex_str_to_bytes(address.lower()) - filter_condition = or_( - ContractInternalTransactions.from_address == address, - ContractInternalTransactions.to_address == address, - ) - elif block: - filter_condition = ContractInternalTransactions.block_number == block - - response_columns = [ - "trace_id", - "from_address", - "to_address", - "value", - "trace_type", - "call_type", - "error", - "status", - "block_number", - "block_timestamp", - "transaction_hash", - ] - transactions = get_internal_transactions_by_condition( - columns=response_columns, - filter_condition=filter_condition, - limit=page_size, - offset=(page_index - 1) * page_size, - ) - - # Count the total number of result - if (len(transactions) > 0 or page_index == 1) and len(transactions) < page_size: - total_records = (page_index - 1) * page_size + len(transactions) - elif filter_condition == True: - total_records = get_total_row_count("contract_internal_transactions") - else: - total_records = 
get_internal_transactions_cnt_by_condition( - columns=["trace_id"], filter_condition=filter_condition - ) - - transaction_list = [] - bytea_address_list = [] - for transaction in transactions: - transaction_json = format_to_dict(transaction) - transaction_json["from_address_is_contract"] = False - transaction_json["to_address_is_contract"] = False - transaction_json["value"] = format_coin_value_with_unit( - transaction.value, app_config.token_configuration.native_token - ) - transaction_list.append(transaction_json) - bytea_address_list.append(transaction.from_address) - bytea_address_list.append(transaction.to_address) - - # Find whether from/to address is a smart contract - fill_is_contract_to_transactions(transaction_list, bytea_address_list) - # Add display name for from/to address - fill_address_display_to_transactions(transaction_list, bytea_address_list) - - return { - "data": transaction_list, - "total": total_records, - "max_display": min(total_records, MAX_INTERNAL_TRANSACTION), - "page": page_index, - "size": page_size, - }, 200 - - -@explorer_namespace.route("/v1/explorer/transactions") -class ExplorerTransactions(Resource): - @cache.cached(timeout=3, query_string=True) - def get(self): - page_index = int(flask.request.args.get("page", 1)) - page_size = int(flask.request.args.get("size", 25)) - if page_index <= 0 or page_size <= 0: - raise APIError("Invalid page or size", code=400) - - if page_index * page_size > MAX_TRANSACTION: - raise APIError(f"Showing the last {MAX_TRANSACTION} records only", code=400) - - batch = flask.request.args.get("batch", None) - state_batch = flask.request.args.get("state_batch", None) - da_batch = flask.request.args.get("da_batch", None) - block = flask.request.args.get("block", None) - address = flask.request.args.get("address", None) - date = flask.request.args.get("date", None) - - has_filter = False - if batch or block or state_batch or da_batch or address or date: - has_filter = True - if page_index * page_size > 
MAX_TRANSACTION_WITH_CONDITION: - raise APIError( - f"Showing the last {MAX_TRANSACTION_WITH_CONDITION} records only", - code=400, - ) - - filter_condition = True - total_records = 0 - - if block: - if block.isnumeric(): - chain_block = get_block_by_number(block_number=int(block)) - if not chain_block: - raise APIError("Block not exist", code=400) - total_records = chain_block.transactions_count - filter_condition = Transactions.block_number == block - else: - bytea_block_hash = hex_str_to_bytes(block) - chain_block = get_block_by_hash(hash=block) - if not chain_block: - raise APIError("Block not exist", code=400) - total_records = chain_block.transactions_count - filter_condition = Transactions.block_hash == bytea_block_hash - - elif address: - address_str = address.lower() - address_bytes = hex_str_to_bytes(address_str) - filter_condition = or_( - Transactions.from_address == address_bytes, - Transactions.to_address == address_bytes, - ) - total_records = get_address_transaction_cnt(address_str) - elif date: - date_object = datetime.strptime(date, "%Y%m%d") - start_time = date_object - end_time = start_time + timedelta(days=1) - - filter_condition = (Transactions.block_timestamp >= start_time) & (Transactions.block_timestamp < end_time) - - transactions = get_transactions_by_condition( - columns=TRANSACTION_LIST_COLUMNS, - filter_condition=filter_condition, - limit=page_size, - offset=(page_index - 1) * page_size, - ) - - if (len(transactions) > 0 or page_index == 1) and len(transactions) < page_size: - total_records = (page_index - 1) * page_size + len(transactions) - - # Only if has filter and we haven't calculate total transactions, then we query to get total count - elif has_filter and len(transactions) > 0 and total_records == 0: - total_records = get_transactions_cnt_by_condition(filter_condition=filter_condition, columns=["hash"]) - elif total_records == 0: - total_records = get_total_txn_count() - - transaction_list = parse_transactions(transactions) - - 
return { - "data": transaction_list, - "total": total_records, - "max_display": min( - (MAX_TRANSACTION_WITH_CONDITION if has_filter else MAX_TRANSACTION), - total_records, - ), - "page": page_index, - "size": page_size, - }, 200 - - -# { -# 'components':[ -# {'internalType': 'address', 'name': 'pool', 'type': 'address'}, -# {'internalType': 'bytes', 'name': 'data', 'type': 'bytes'}, -# {'internalType': 'address', 'name': 'callback', 'type': 'address'}, -# {'internalType': 'bytes', 'name': 'callbackData', 'type': 'bytes'} -# ], -# 'internalType': 'struct IRouter.SwapStep[]', -# 'name': 'steps', -# 'type': 'tuple[]' -# } -def generate_type_str(component): - if component["type"] == "tuple[]": - tuple_types = tuple(map(lambda x: generate_type_str(x), component["components"])) - return "(" + ",".join(tuple_types) + ")[]" - elif component["type"] == "tuple": - tuple_types = tuple(map(lambda x: generate_type_str(x), component["components"])) - return "(" + ",".join(tuple_types) + ")" - else: - return component["type"] - - -@explorer_namespace.route("/v1/explorer/transaction/") -class ExplorerTransactionDetail(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, hash): - hash = hash.lower() - bytes_hash = hex_str_to_bytes(hash) - transaction = get_transaction_by_hash(hash=hash) - if transaction: - transaction_json = parse_transactions([transaction])[0] - filter_condition = and_( - Traces.transaction_hash == bytes_hash, - Traces.trace_address == "{}", - ) - - traces = get_traces_by_condition(filter_condition=filter_condition, columns=["error"], limit=1) - - # Add trace info to transaction detail - if len(traces) > 0 and traces[0] and traces[0].error: - transaction_json["trace_error"] = traces[0].error - - abi_dict = get_abis_for_method([(transaction_json["to_address"], transaction_json["method_id"])]) - - try: - if abi_dict: - _, contract_function_abi = abi_dict.popitem() - data_types = [] - function_abi_json = 
json.loads(contract_function_abi.get("function_abi")) - for input in function_abi_json["inputs"]: - full_type_str = generate_type_str(input) - data_types.append(full_type_str) - input["full_type_str"] = full_type_str - - decoded_data, endcoded_data = decode_log_data(data_types, transaction.input[10:]) - input_data = [] - full_function_name = "" - for index in range(len(function_abi_json["inputs"])): - param = function_abi_json["inputs"][index] - input_data.append( - { - "name": param["name"], - "data_type": param["full_type_str"], - "hex_data": decoded_data[index], - "dec_data": endcoded_data[index], - } - ) - full_function_name += f"{param['full_type_str']} {param['name']}, " - function_name = contract_function_abi.get("function_name") - full_function_name = f"{function_name}({full_function_name[:-2]})" - transaction_json["input_data"] = input_data - transaction_json["function_name"] = function_name - transaction_json["function_unsigned"] = contract_function_abi.get("function_unsigned") - transaction_json["full_function_name"] = full_function_name - except Exception as e: - print(str(e)) - - return transaction_json, 200 - else: - raise APIError("Cannot find transaction with hash", code=400) - - -@explorer_namespace.route("/v1/explorer/transaction//logs") -class ExplorerTransactionLogs(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, hash): - logs = get_logs_with_input_by_hash(hash=hash) - log_list = parse_log_with_transaction_input_list(logs) - - return {"total": len(log_list), "data": log_list}, 200 - - -@explorer_namespace.route("/v1/explorer/transaction//token_transfers") -class ExplorerTransactionTokenTransfers(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, hash): - erc20_token_transfers = get_token_transfers_with_token_by_hash( - hash=hash, - model=ERC20TokenTransfers, - token_columns=["name", "symbol", "decimals", "icon_url"], - ) - - erc721_token_transfers = get_token_transfers_with_token_by_hash( - 
hash=hash, model=ERC721TokenTransfers, token_columns=["name", "symbol"] - ) - - erc1155_token_transfers = get_token_transfers_with_token_by_hash( - hash=hash, model=ERC1155TokenTransfers, token_columns=["name", "symbol"] - ) - - token_transfer_list = [] - token_transfer_list.extend(process_token_transfer(erc20_token_transfers, "tokentxns")) - token_transfer_list.extend(process_token_transfer(erc721_token_transfers, "tokentxns-nft")) - token_transfer_list.extend(process_token_transfer(erc1155_token_transfers, "tokentxns-nft1155")) - fill_address_display_to_transactions(token_transfer_list) - - return { - "total": len(token_transfer_list), - "data": token_transfer_list, - }, 200 - - -@explorer_namespace.route("/v1/explorer/transaction//internal_transactions") -class ExplorerTransactionInternalTransactions(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, hash): - transactions = get_internal_transactions_by_transaction_hash(transaction_hash=hash) - - transaction_list = [] - bytea_address_list = [] - for transaction in transactions: - transaction_json = format_to_dict(transaction) - transaction_json["from_address_is_contract"] = False - transaction_json["to_address_is_contract"] = False - transaction_json["value"] = format_coin_value_with_unit( - transaction.value, app_config.token_configuration.native_token - ) - transaction_list.append(transaction_json) - bytea_address_list.append(transaction.from_address) - bytea_address_list.append(transaction.to_address) - - # Find whether from/to address is a smart contract - fill_is_contract_to_transactions(transaction_list, bytea_address_list) - # Add display name for from/to address - fill_address_display_to_transactions(transaction_list, bytea_address_list) - - return {"total": len(transaction_list), "data": transaction_list}, 200 - - -@explorer_namespace.route("/v1/explorer/transaction//all_internal_transactions") -class ExplorerTransactionInternalTransactions(Resource): - @cache.cached(timeout=360, 
query_string=True) - def get(self, hash): - - internal_transactions = ( - db.session.query(Traces).filter(Traces.transaction_hash == bytes.fromhex(hash[2:])).all() - ) - transaction_list = [] - address_list = [] - for transaction in internal_transactions: - transaction_json = as_dict(transaction) - transaction_json["from_address_is_contract"] = False - transaction_json["to_address_is_contract"] = False - transaction_json["value"] = ( - format_coin_value_with_unit(transaction.value or 0, app_config.token_configuration.native_token) - if transaction.value - else 0 - ) - transaction_list.append(transaction_json) - address_list.append(transaction.from_address) - address_list.append(transaction.to_address) - - # Find contract - contracts = ( - db.session.query(Contracts) - .with_entities(Contracts.address) - .filter(Contracts.address.in_(list(set(address_list)))) - .all() - ) - contract_list = set(map(lambda x: x.address, contracts)) - - for transaction_json in transaction_list: - if transaction_json["to_address"] in contract_list: - transaction_json["to_address_is_contract"] = True - if transaction_json["from_address"] in contract_list: - transaction_json["from_address_is_contract"] = True - - fill_address_display_to_transactions(transaction_list) - transaction_list.sort(key=lambda x: int(x["trace_id"].split("-")[-1]) if x["trace_id"] else 0) - return {"total": len(transaction_list), "data": transaction_list}, 200 - - -@explorer_namespace.route("/v1/explorer/transaction//traces") -class ExplorerTransactionInternalTransactions(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, hash): - def process_data(data): - if data is None: - raise APIError("Trace Not Found", code=400) - function_signature_contracts_set = set() - addresses_set = set() - - def process_signature_contracts_map_from_trace(obj): - if isinstance(obj, dict): - from_address = obj.get("from_address") - to_address = obj.get("to_address") - if to_address: - 
addresses_set.add(hex_str_to_bytes(to_address)) - if from_address: - addresses_set.add(hex_str_to_bytes(from_address)) - input = obj.get("input") - if to_address and input and len(input) > 10: - function_signature_contracts_set.add((to_address, input[:10])) - if obj.get("calls"): - for call in obj.get("calls"): - process_signature_contracts_map_from_trace(call) - - process_signature_contracts_map_from_trace(data) - - abi_map = get_abis_for_method(list(function_signature_contracts_set)) - - address_display_map = get_address_display_mapping(addresses_set) - - def convert_hex_to_dec(x): - if x is None: - return 0 - return int(x, 16) if isinstance(x, str) and x.startswith("0x") else x - - def traverse(obj): - if isinstance(obj, dict): - if obj.get("from_address") in address_display_map: - obj["from_address_display_name"] = address_display_map.get(obj.get("from_address")) - else: - obj["from_address_display_name"] = obj.get("from_address") - - if obj.get("to_address") in address_display_map: - obj["to_address_display_name"] = address_display_map.get(obj.get("to_address")) - else: - obj["to_address_display_name"] = obj.get("to_address") - - function_name = None - function_input, function_output = [], [] - if obj.get("call_type") == "selfdestruct": - function_name = "Selfdestruct" - elif obj.get("call_type") in ["create2", "create"]: - function_name = "CreateContract" - else: - input = obj.get("input") or "0x" - output = obj.get("output") or "0x" - contract_function_abi = abi_map.get((obj.get("to_address"), input[:10])) - decode_failed = True - if contract_function_abi: - abi_function = Function(json.loads(contract_function_abi.get("function_abi"))) - function_name = f"{contract_function_abi.get('function_name')}" - try: - function_input, function_output = decode_function(abi_function, input[2:], output[2:]) - decode_failed = False - except Exception as e: - logging.error( - f'Error decoding function: {str(e)}, to_address: {obj.get("to_address")}, tx_hash: {hash}, 
contract_function_abi: {abi_function.get_abi()}' - ) - - if decode_failed and obj.get("to_address") and len(input) >= 10: - function_name = input[:10] - function_input = [ - { - "name": "call_data", - "value": input[10:], - "type": "string", - } - ] - if len(output) > 2: - function_output = ( - [ - { - "name": "return_data", - "value": output[2:], - "type": "string", - } - ] - if len(output) > 2 - else [] - ) - else: - function_name = "fallback" - function_input = ( - [ - { - "name": "call_data", - "value": input[2:], - "type": "string", - } - ] - if len(input) > 2 - else [] - ) - function_output = [] - - obj["function_name"] = function_name - obj["function_input"] = function_input - obj["function_output"] = function_output - if obj.get("calls"): - for call in obj.get("calls"): - traverse(call) - - traverse(data) - return data - - hash = hash.lower() - if len(hash) != 66 or not all(c in string.hexdigits for c in hash[2:]): - raise APIError("Invalid transaction hash", code=400) - try: - - traces_row = get_traces_by_transaction_hash(hash) - - trace = get_debug_trace_transaction( - [ - { - "from_address": bytes_to_hex_str(trace.from_address), - "to_address": bytes_to_hex_str(trace.to_address), - "value": ( - "{0:.18f}".format(trace.value / 10**18).rstrip("0").rstrip(".") - if trace.value is not None and trace.value != 0 - else None - ), - "trace_type": trace.trace_type, - "call_type": trace.call_type, - "gas": (int(trace.gas) if trace.gas is not None else None), - "gasUsed": (int(trace.gas_used) if trace.gas_used is not None else None), - "gas_used": (int(trace.gas_used) if trace.gas_used is not None else None), - "input": (bytes_to_hex_str(trace.input) if trace.input is not None else None), - "output": (bytes_to_hex_str(trace.output) if trace.output is not None else None), - "trace_address": str(trace.trace_address).replace("[", "{").replace("]", "}"), - "subtraces": trace.subtraces, - "error": trace.error, - "status": trace.status, - } - for trace in traces_row - ] 
- ) - except Exception as e: - raise APIError(str(e), code=400) - return {"data": process_data(trace)}, 200 - - -@explorer_namespace.route("/v1/explorer/tokens") -class ExplorerTokens(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self): - page_index = int(flask.request.args.get("page", 1)) - page_size = int(flask.request.args.get("size", 25)) - is_verified = flask.request.args.get("is_verified", "false") in [ - "True", - "true", - "TRUE", - ] - if page_index <= 0 or page_size <= 0: - raise APIError("Invalid page or size", code=400) - # erc20, erc721, erc1155 - type = flask.request.args.get("type") - if type == "erc20": - sort = flask.request.args.get("sort", "market_cap") - order = flask.request.args.get("order", "desc") - if sort not in [ - "market_cap", - "volume_24h", - "holder_count", - "price", - "on_chain_market_cap", - ]: - raise APIError("Invalid sort", code=400) - if order not in ["asc", "desc"]: - raise APIError("Invalid order", code=400) - order_expression = getattr(Tokens, sort) - if order == "desc": - order_expression = order_expression.desc() - else: - order_expression = order_expression.asc() - order_expression = nullslast(order_expression) - - filter_condition = and_( - Tokens.token_type == "ERC20", - Tokens.is_verified == is_verified if is_verified else 1 == 1, - ) - tokens = get_tokens_by_condition( - columns=[ - "address", - "name", - "symbol", - "icon_url", - "total_supply", - "decimals", - "price", - "description", - "volume_24h", - "market_cap", - "on_chain_market_cap", - "holder_count", - ], - filter_condition=filter_condition, - order=order_expression, - limit=page_size, - offset=(page_index - 1) * page_size, - ) - - token_list = [ - { - "address": bytes_to_hex_str(x.address), - "name": x.name, - "symbol": x.symbol, - "logo": x.icon_url, - "description": x.description, - "total_supply": ( - int(x.total_supply) * 10 ** (0 - int(x.decimals)) if x.total_supply is not None else None - ), - "volume_24h": (round(x.volume_24h, 
2) if x.volume_24h is not None else None), - "market_cap": (round(x.market_cap, 2) if x.market_cap is not None else None), - "on_chain_market_cap": ( - round(x.on_chain_market_cap, 2) if x.on_chain_market_cap is not None else None - ), - "holder_count": x.holder_count, - "price": (round(x.price, 4) if x.price is not None else None), - } - for x in tokens - ] - - if is_verified: - total_records = get_tokens_cnt_by_condition( - filter_condition=and_(Tokens.is_verified == is_verified, Tokens.token_type == "ERC20") - ) - else: - total_records = get_tokens_cnt_by_condition(filter_condition=Tokens.token_type == "ERC20") - - elif type == "erc721": - sort = flask.request.args.get("sort", "holder_count") - order = flask.request.args.get("order", "desc") - if sort not in ["holder_count", "transfer_count"]: - raise APIError("Invalid sort", code=400) - if order not in ["asc", "desc"]: - raise APIError("Invalid order", code=400) - order_expression = getattr(Tokens, sort) - if order == "desc": - order_expression = order_expression.desc() - else: - order_expression = order_expression.asc() - order_expression = nullslast(order_expression) - tokens = get_tokens_by_condition( - columns=[ - "address", - "name", - "symbol", - "total_supply", - "holder_count", - "transfer_count", - ], - filter_condition=Tokens.token_type == "ERC721", - order=order_expression, - limit=page_size, - offset=(page_index - 1) * page_size, - ) - - total_records = get_tokens_cnt_by_condition(filter_condition=Tokens.token_type == "ERC721") - - token_list = [ - { - "address": bytes_to_hex_str(x.address), - "name": x.name, - "symbol": x.symbol, - "total_supply": (int(x.total_supply) if x.total_supply is not None else None), - "holder_count": x.holder_count, - "transfer_count": x.transfer_count, - } - for x in tokens - ] - - elif type == "erc1155": - sort = flask.request.args.get("sort", "holder_count") - order = flask.request.args.get("order", "desc") - if sort not in ["holder_count", "transfer_count"]: - raise 
APIError("Invalid sort", code=400) - if order not in ["asc", "desc"]: - raise APIError("Invalid order", code=400) - order_expression = getattr(Tokens, sort) - if order == "desc": - order_expression = order_expression.desc() - else: - order_expression = order_expression.asc() - order_expression = nullslast(order_expression) - - tokens = get_tokens_by_condition( - columns=[ - "address", - "name", - "symbol", - "total_supply", - "holder_count", - "transfer_count", - ], - filter_condition=Tokens.token_type == "ERC1155", - order=order_expression, - limit=page_size, - offset=(page_index - 1) * page_size, - ) - - total_records = get_tokens_cnt_by_condition(filter_condition=Tokens.token_type == "ERC1155") - - token_list = [ - { - "address": bytes_to_hex_str(x.address), - "name": x.name, - "symbol": x.symbol, - "total_supply": (int(x.total_supply) if x.total_supply is not None else None), - "holder_count": x.holder_count, - "transfer_count": x.transfer_count, - } - for x in tokens - ] - else: - raise APIError("Invalid type", code=400) - - return { - "page": page_index, - "size": page_size, - "total": total_records, - "data": token_list, - }, 200 - - -@explorer_namespace.route("/v1/explorer/token_transfers") -class ExplorerTokenTransfers(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self): - page_index = int(flask.request.args.get("page", 1)) - page_size = int(flask.request.args.get("size", 25)) - if page_index <= 0 or page_size <= 0: - raise APIError("Invalid page or size", code=400) - - # type must be one of tokentxns, tokentxns-nft, tokentxns-nft1155 - # type must be one of erc20, erc721, erc1155 - type = flask.request.args.get("type", "").lower() - - # type must be one of tokentxns, tokentxns-nft, tokentxns-nft1155 - # type must be one of erc20, erc721, erc1155 - type = flask.request.args.get("type", "").lower() - - if page_index * page_size > MAX_TOKEN_TRANSFER: - raise APIError(f"Showing the last {MAX_TOKEN_TRANSFER} records only", code=400) - - 
address = flask.request.args.get("address", None) - token_address = flask.request.args.get("token_address", None) - - filter_condition = True - if address: - str_address = address.lower() - bytea_address = hex_str_to_bytes(str_address) - if type in ["tokentxns", "erc20"]: - filter_condition = or_( - ERC20TokenTransfers.from_address == bytea_address, - ERC20TokenTransfers.to_address == bytea_address, - ) - elif type in ["tokentxns-nft", "erc721"]: - filter_condition = or_( - ERC721TokenTransfers.from_address == bytea_address, - ERC721TokenTransfers.to_address == bytea_address, - ) - elif type in ["tokentxns-nft1155", "erc1155"]: - filter_condition = or_( - ERC1155TokenTransfers.from_address == bytea_address, - ERC1155TokenTransfers.to_address == bytea_address, - ) - total_count = get_address_token_transfer_cnt(type, filter_condition, bytea_address) - elif token_address: - str_token_address = token_address.lower() - bytea_token_address = hex_str_to_bytes(token_address.lower()) - if type in ["tokentxns", "erc20"]: - filter_condition = ERC20TokenTransfers.token_address == bytea_token_address - elif type in ["tokentxns-nft", "erc721"]: - filter_condition = ERC721TokenTransfers.token_address == bytea_token_address - elif type in ["tokentxns-nft1155", "erc1155"]: - filter_condition = ERC1155TokenTransfers.token_address == bytea_token_address - total_count = get_token_address_token_transfer_cnt(type, str_token_address) - else: - total_count = get_total_row_count(type_to_token_transfer_table(type).__tablename__) - - token_transfers, _ = get_raw_token_transfers(type, filter_condition, page_index, page_size, is_count=False) - token_transfer_list = parse_token_transfers(token_transfers, type) - return { - "page": page_index, - "size": page_size, - "total": total_count, - "max_display": MAX_TOKEN_TRANSFER, - "data": token_transfer_list, - }, 200 - - -class CustomRequestParser(reqparse.RequestParser): - def add_argument(self, *args, **kwargs): - if "location" not in kwargs: - 
kwargs["location"] = "args" - return super(CustomRequestParser, self).add_argument(*args, **kwargs) - - -blocks_parser = CustomRequestParser() - -blocks_parser.add_argument("page", type=int, default=1, help="Page number") -blocks_parser.add_argument("size", type=int, default=25, help="Page size") -blocks_parser.add_argument("state_batch", type=int, default=None, help="State batch filter") -blocks_parser.add_argument("batch", type=int, default=None, help="Batch filter") - - -@explorer_namespace.route("/v1/explorer/blocks") -class ExplorerBlocks(Resource): - @cache.cached(timeout=3, query_string=True) - def get(self): - args = blocks_parser.parse_args() - page_index = args.get("page") - page_size = args.get("size") - if page_index <= 0 or page_size <= 0: - raise APIError("Invalid page or size", code=400) - - state_batch = args.get("state_batch") - batch = args.get("batch") - - block_list_columns = [ - "hash", - "number", - "timestamp", - "parent_hash", - "gas_limit", - "gas_used", - "base_fee_per_gas", - "miner", - "transactions_count", - "internal_transactions_count", - ] - - if state_batch is None and batch is None: - - latest_block = get_last_block(columns=["number"]) - - total_blocks = latest_block.number if latest_block else 0 - - end_block = total_blocks - (page_index - 1) * page_size - start_block = end_block - page_size + 1 - start_block = max(0, start_block) - - blocks = get_blocks_by_condition( - columns=block_list_columns, filter_condition=Blocks.number.between(start_block, end_block) - ) - else: - # TODO: Fix blocks filter by state_batch and batch - filter_condition = True - total_blocks = 0 - blocks = get_blocks_by_condition( - columns=block_list_columns, - filter_condition=filter_condition, - limit=page_size, - offset=(page_index - 1) * page_size, - ) - if total_blocks == 0 and len(blocks) > 0: - latest_block = get_last_block(columns=["number", "timestamp"]) - total_blocks = latest_block.number - block_list = [ - format_to_dict(block) - | { - 
"transaction_count": block.transactions_count, - "internal_transaction_count": ( - 0 if block.internal_transactions_count is None else block.internal_transactions_count - ), - "internal_transactions_count": ( - 0 if block.internal_transactions_count is None else block.internal_transactions_count - ), - } - for block in blocks - ] - - return { - "data": block_list, - "total": total_blocks, - "page": page_index, - "size": page_size, - }, 200 - - -@explorer_namespace.route("/v1/explorer/block/") -class ExplorerBlockDetail(Resource): - @cache.cached(timeout=1800, query_string=True) - def get(self, number_or_hash): - if number_or_hash.isnumeric(): - number = int(number_or_hash) - block = get_block_by_number(block_number=int(number)) - else: - block = get_block_by_hash(hash=number_or_hash) - - if block: - block_json = format_to_dict(block) - # Need additional data eth_price, block time, internal_transaction_count - - # Added by indexer now - # internal_transaction_count = get_internal_transactions_cnt_by_condition( - # filter_condition=ContractInternalTransactions.block_number == block.number) - block_json["internal_transaction_count"] = ( - 0 if block.internal_transactions_count is None else block.internal_transactions_count - ) - - block_json["gas_fee_token_price"] = "{0:.2f}".format( - get_token_price(app_config.token_configuration.gas_fee_token, block.timestamp) - ) - - earlier_block_number = max(block.number - 1, 1) - earlier_block = get_block_by_number(block_number=earlier_block_number, columns=["number", "timestamp"]) - - block_json["seconds_since_last_block"] = block.timestamp.timestamp() - earlier_block.timestamp.timestamp() - block_json["transaction_count"] = block.transactions_count - - latest_block = get_last_block(columns=["number"]) - - block_json["is_last_block"] = latest_block.number == block.number - return block_json, 200 - else: - raise APIError("Cannot find block with block number or block hash", code=400) - - 
-@explorer_namespace.route("/v1/explorer/address/
/profile") -class ExplorerAddressProfile(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, address): - address = address.lower() - NATIVE_TOKEN_PRICE = get_token_price(app_config.token_configuration.native_token) - - native_token_balance = get_balance(address) - profile_json = { - "balance": "{0:.18f}".format(native_token_balance / 10**18).rstrip("0").rstrip("."), - "native_token_price": "{0:.2f}".format(NATIVE_TOKEN_PRICE), - "balance_dollar": "{0:.2f}".format(native_token_balance * Decimal(NATIVE_TOKEN_PRICE) / 10**18), - "is_contract": False, - "is_token": False, - } - - contract = get_contract_by_address(address) - if contract: - profile_json["is_contract"] = True - profile_json["contract_creator"] = bytes_to_hex_str(contract.contract_creator) - profile_json["transaction_hash"] = bytes_to_hex_str(contract.transaction_hash) - profile_json["is_verified"] = contract.is_verified - profile_json["is_proxy"] = contract.is_proxy - profile_json["implementation_contract"] = ( - bytes_to_hex_str(contract.implementation_contract) if contract.implementation_contract else None - ) - profile_json["verified_implementation_contract"] = ( - bytes_to_hex_str(contract.verified_implementation_contract) - if contract.verified_implementation_contract - else None - ) - profile_json["bytecode"] = bytes_to_hex_str(contract.deployed_code) if contract.deployed_code else None - profile_json["creation_code"] = bytes_to_hex_str(contract.creation_code) if contract.creation_code else None - profile_json["deployed_code"] = bytes_to_hex_str(contract.deployed_code) if contract.deployed_code else None - - deployed_code = contract.deployed_code or get_sha256_hash(get_code(address)) - addresses = get_similar_addresses(deployed_code) - profile_json["similar_verified_addresses"] = [add for add in addresses if add != address] - - token = get_token_by_address(address) - - if token: - profile_json["is_token"] = True - profile_json["token_type"] = token.token_type # 
ERC20/ERC721/ERC1155 - profile_json["token_name"] = token.name or "Unknown Token" - profile_json["token_symbol"] = token.symbol or "UNKNOWN" - profile_json["token_logo_url"] = token.icon_url or None - - # "block_validated": 1 - return profile_json - - -@explorer_namespace.route("/v1/explorer/address/
/token_holdings") -@explorer_namespace.route("/v2/explorer/address/
/token_holdings") -class ExplorerAddressTokenHoldingsV2(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, address): - address = address.lower() - address_bytes = hex_str_to_bytes(address) - subquery = ( - db.session.query( - AddressTokenBalances.token_address, - AddressTokenBalances.balance, - AddressTokenBalances.token_id, - AddressTokenBalances.token_type, - func.row_number() - .over( - partition_by=( - AddressTokenBalances.token_address, - AddressTokenBalances.token_id, - ), - order_by=[ - AddressTokenBalances.block_timestamp.desc(), - AddressTokenBalances.block_number.desc(), - ], - ) - .label("rn"), - ) - .filter(AddressTokenBalances.address == address_bytes) - .subquery() - ) - - # Left join with other token tables - result = ( - db.session.query( - subquery, - func.coalesce(Tokens.name, Tokens.name, Tokens.name).label("name"), - func.coalesce( - Tokens.symbol, - Tokens.symbol, - Tokens.symbol, - ).label("symbol"), - func.coalesce(Tokens.icon_url, Tokens.icon_url, Tokens.icon_url).label("logo"), - Tokens.decimals.label("decimals"), - ) - .outerjoin( - Tokens, - subquery.c.token_address == Tokens.address, - ) - .filter(subquery.c.rn == 1, subquery.c.balance > 0) - .order_by(subquery.c.token_type) - .all() - ) - token_holder_list = [] - for token_holder in result: - token_holder_list.append( - { - "token_address": bytes_to_hex_str(token_holder.token_address), - "balance": "{0:.6f}".format((token_holder.balance / 10 ** (token_holder.decimals or 0))) - .rstrip("0") - .rstrip("."), - "token_id": (int(token_holder.token_id) if token_holder.token_id else None), - "token_name": token_holder.name or "Unknown Token", - "token_symbol": token_holder.symbol or "UNKNOWN", - "token_logo_url": token_holder.logo or None, - "token_type": token_holder.token_type, - "type": { - "ERC20": "tokentxns", - "ERC721": "tokentxns-nft", - "ERC1155": "tokentxns-nft1155", - }.get(token_holder.token_type), - } - ) - - # Add inscriptions - # 
add_inscription_holdings() - - return {"data": token_holder_list, "total": len(token_holder_list)} - - -@explorer_namespace.route("/v1/explorer/address/
/transactions") -class ExplorerAddressTransactions(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self, address): - address = address.lower() - - transactions = get_address_transactions( - address=address, - ) - - if len(transactions) < PAGE_SIZE: - total_count = len(transactions) - else: - total_count = get_address_transaction_cnt_v2(address) - - transaction_list = parse_address_transactions(transactions) - - return { - "data": transaction_list, - "total": total_count, - }, 200 - - -@explorer_namespace.route("/v1/explorer/address/
/token_transfers") -class ExplorerAddressTokenTransfers(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self, address): - address = address.lower() - bytea_address = hex_str_to_bytes(address) - type = flask.request.args.get("type", "").lower() - - if type in ["tokentxns", "erc20"]: - token_transfers = get_address_token_transfers(address) - token_transfer_list = parse_address_token_transfers(token_transfers) - total_count = get_address_erc20_token_transfer_cnt(bytea_address) - return { - "total": total_count, - "data": token_transfer_list, - "type": type, - }, 200 - - elif type in ["tokentxns-nft", "erc721"]: - condition = or_( - ERC721TokenTransfers.from_address == bytea_address, - ERC721TokenTransfers.to_address == bytea_address, - ) - elif type in ["tokentxns-nft1155", "erc1155"]: - condition = or_( - ERC1155TokenTransfers.from_address == bytea_address, - ERC1155TokenTransfers.to_address == bytea_address, - ) - else: - raise APIError("Invalid type", code=400) - - token_transfers, _ = get_raw_token_transfers(type, condition, 1, PAGE_SIZE, is_count=False) - total_count = get_address_token_transfer_cnt(type, condition, bytea_address) - token_transfer_list = parse_token_transfers(token_transfers, type) - - return { - "total": total_count, - "data": token_transfer_list, - "type": type, - }, 200 - - -@explorer_namespace.route("/v1/explorer/address/
/internal_transactions") -class ExplorerAddressInternalTransactions(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self, address): - address = address.lower() - address_bytes = hex_str_to_bytes(address) - filter_condition = or_( - ContractInternalTransactions.from_address == address_bytes, - ContractInternalTransactions.to_address == address_bytes, - ) - - transactions = get_internal_transactions_by_condition(filter_condition=filter_condition, limit=PAGE_SIZE) - - if len(transactions) < PAGE_SIZE: - total_count = len(transactions) - else: - total_count = get_internal_transactions_cnt_by_condition(filter_condition=filter_condition) - - transaction_list = [] - bytea_address_list = [] - for transaction in transactions: - transaction_json = format_to_dict(transaction) - transaction_json["from_address_is_contract"] = False - transaction_json["to_address_is_contract"] = False - transaction_json["value"] = format_coin_value_with_unit( - transaction.value, app_config.token_configuration.native_token - ) - transaction_list.append(transaction_json) - bytea_address_list.append(transaction.from_address) - bytea_address_list.append(transaction.to_address) - - # Find whether from/to address is a smart contract - fill_is_contract_to_transactions(transaction_list, bytea_address_list) - # Add display name for from/to address - fill_address_display_to_transactions(transaction_list, bytea_address_list) - - return {"total": total_count, "data": transaction_list}, 200 - - -@explorer_namespace.route("/v1/explorer/address/
/logs") -class ExplorerAddressLogs(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self, address): - address = address.lower() - logs = get_logs_with_input_by_address(address, limit=25) - log_list = parse_log_with_transaction_input_list(logs) - - return {"total": len(logs), "data": log_list}, 200 - - -def token_type_convert(token_type): - if token_type == "ERC20": - return "tokentxns" - elif token_type == "ERC721": - return "tokentxns-nft" - elif token_type == "ERC1155": - return "tokentxns-nft1155" - else: - return None - - -@explorer_namespace.route("/v1/explorer/token/
/profile") -class ExplorerTokenProfile(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, address): - address = address.lower() - token = get_token_by_address(address) - if not token: - raise APIError("Token not found", code=400) - - extra_erc20_token_info = {} - extra_token_info = {} - if token.token_type == "ERC20": - extra_erc20_token_info = { - "token_price": token.price, - "token_previous_price": token.previous_price, - "decimals": float(token.decimals), - "total_supply": "{0:.6f}".format(token.total_supply / (10**token.decimals) or 0) - .rstrip("0") - .rstrip("."), - "token_market_cap": token.market_cap, - "token_on_chain_market_cap": token.on_chain_market_cap, - "previous_price": token.previous_price, - } - if token.gecko_id: - extra_erc20_token_info["gecko_url"] = f"https://www.coingecko.com/en/coins/{token.gecko_id}" - if token.cmc_slug: - extra_erc20_token_info["cmc_url"] = f"https://coinmarketcap.com/currencies/{token.cmc_slug}/" - token_info = { - "token_name": token.name, - "token_checksum_address": to_checksum_address(token.address), - "token_address": bytes_to_hex_str(token.address), - "token_symbol": token.symbol, - "token_logo_url": token.icon_url, - "token_urls": token.urls, - "social_medias": token.urls, - "token_description": token.description, - "total_supply": "{:f}".format(token.total_supply or 0), - "total_holders": token.holder_count, - "total_transfers": get_token_address_token_transfer_cnt(token.token_type, address), - "token_type": token.token_type, - "type": token_type_convert(token.token_type), - } - token_info.update(extra_token_info) - - return {**token_info, **extra_erc20_token_info} - - -@explorer_namespace.route("/v1/explorer/token/
/token_transfers") -class ExplorerTokenTokenTransfers(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self, address): - address = address.lower() - address_bytes = hex_str_to_bytes(address) - - token = get_token_by_address(address) - if not token: - raise APIError("Token not found", code=400) - - if token.token_type == "ERC20": - condition = ERC20TokenTransfers.token_address == address_bytes - elif token.token_type == "ERC721": - condition = ERC721TokenTransfers.token_address == address_bytes - elif token.token_type == "ERC1155": - condition = ERC1155TokenTransfers.token_address == address_bytes - else: - raise APIError("Invalid type", code=400) - - token_transfers, _ = get_raw_token_transfers(token.token_type, condition, 1, PAGE_SIZE, is_count=False) - - token_transfer_list = parse_token_transfers(token_transfers, token.token_type) - return { - "total": get_token_address_token_transfer_cnt(token.token_type, address), - "data": token_transfer_list, - "type": token.token_type, - }, 200 - - -@explorer_namespace.route("/v1/explorer/token//top_holders") -@explorer_namespace.route("/v2/explorer/token//top_holders") -class ExplorerTokenTopHolders(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, token_address): - token_address = token_address.lower() - - page_index = int(flask.request.args.get("page", 1)) - page_size = int(flask.request.args.get("size", PAGE_SIZE)) - if page_index <= 0 or page_size <= 0: - raise APIError("Invalid page or size", code=400) - - token = get_token_by_address(token_address) - - if not token: - raise APIError("Token not found", code=400) - - top_holders = get_token_holders( - token_address=token_address, - model=CurrentTokenBalances, - columns=["balance", "address"], - limit=page_size, - offset=(page_index - 1) * page_size, - ) - - total_records = get_token_holders_cnt( - token_address=token_address, model=CurrentTokenBalances, columns=["address"] - ) - - token_holder_list = [] - for token_holder in 
top_holders: - token_holder_json = {} - token_holder_json["token_address"] = token_address - decimals = 0 - # if type == "tokentxns-nft1155": - # token_holder_json["token_id"] = int(token_holder.token_id) - if token.token_type == "ERC20": - decimals = token.decimals - token_holder_json["wallet_address"] = bytes_to_hex_str(token_holder.address) - token_holder_json["balance"] = "{0:.6f}".format((token_holder.balance / 10 ** (decimals)) or 0) - token_holder_list.append(token_holder_json) - - return {"data": token_holder_list, "total": total_records} - - -@explorer_namespace.route("/v1/explorer/contract/update_info") -@explorer_namespace.route("/v1/socialscan/contract/update_info") -class ExplorerUploadContractInfo(Resource): - def post(self): - request_body = flask.request.json - address = request_body.get("address") - official_website = request_body.get("official_website") - social_list = request_body.get("social_list") - description = request_body.get("description") - email = request_body.get("email") - - if not address or (not official_website and not social_list and not description and not email): - raise APIError("Missing required data", code=400) - - # Check if address exists in ContractsInfo - contracts = get_contract_by_address(address) - - if not contracts: - raise APIError("Error address", code=400) - - # Update existing contract info - if official_website: - contracts.official_website = official_website - if social_list: - contracts.social_list = social_list - if description: - contracts.description = description - if email: - contracts.email = email - contracts.update_time = datetime.now() - db.session.commit() - - return {"message": "Contract info updated successfully"}, 200 - - -@explorer_namespace.route("/v1/explorer/statistics/contract/ranks") -class ExplorerStatisticsContractData(Resource): - statistics_sql_mapping = { - "transactions_received": lambda session, limit: session.query( - Transactions.to_address.label("address"), - 
func.count().label("transaction_count"), - AddressIndexStats.tag, - ) - .join( - AddressIndexStats, - Transactions.to_address == AddressIndexStats.address, - isouter=True, - ) - .filter( - Transactions.block_timestamp > datetime.now() - timedelta(days=1), - Transactions.to_address.in_(session.query(Contracts.address)), - ) - .group_by(Transactions.to_address, AddressIndexStats.tag) - .order_by(func.count().desc()) - .limit(limit) - .all(), - } - - @cache.cached(timeout=600, query_string=True) - def get(self): - statistics_arg = flask.request.args.get("statistics", None) - try: - limit = int(flask.request.args.get("limit", 10)) - except ValueError: - limit = 10 - if limit > 100: - raise APIError("Limit should not be greater than 100", code=400) - - if statistics_arg not in self.statistics_sql_mapping: - raise APIError("Invalid or missing statistics type", code=400) - - result = self.statistics_sql_mapping[statistics_arg](db.session, limit) - - address_list = [] - for row in result: - address_json = row_to_dict(row) - address_list.append(address_json) - - return {"data": address_list}, 200 - - -@explorer_namespace.route("/v1/explorer/statistics/address/ranks") -class ExplorerStatisticsAddressData(Resource): - statistics_sql_mapping = { - "gas_used": lambda session, limit: session.query( - Transactions.from_address.label("address"), - func.sum(Transactions.receipt_gas_used).label("gas_used"), - AddressIndexStats.tag, - ) - .join( - AddressIndexStats, - Transactions.from_address == AddressIndexStats.address, - isouter=True, - ) - .filter(Transactions.block_timestamp > datetime.now() - timedelta(days=1)) - .group_by(Transactions.from_address, AddressIndexStats.tag) - .order_by(func.sum(Transactions.receipt_gas_used).desc()) - .limit(limit) - .all(), - "transactions_sent": lambda session, limit: session.query( - Transactions.from_address.label("address"), - func.count().label("transaction_count"), - AddressIndexStats.tag, - ) - .join( - AddressIndexStats, - 
Transactions.from_address == AddressIndexStats.address, - isouter=True, - ) - .filter(Transactions.block_timestamp > datetime.now() - timedelta(days=1)) - .group_by(Transactions.from_address, AddressIndexStats.tag) - .order_by(func.count().desc()) - .limit(limit) - .all(), - } - - @cache.cached(timeout=600, query_string=True) - def get(self): - statistics_arg = flask.request.args.get("statistics", None) - try: - limit = int(flask.request.args.get("limit", 10)) - except ValueError: - limit = 10 - if limit > 100: - raise APIError("Limit should not be greater than 100", code=400) - - if statistics_arg not in self.statistics_sql_mapping: - raise APIError("Invalid or missing statistics type", code=400) - - result = self.statistics_sql_mapping[statistics_arg](db.session, limit) - - unique_addresses = [bytes_to_hex_str(row.address) for row in result] - ens_mapping = get_ens_mapping(unique_addresses) - address_list = [] - for row in result: - address_json = row_to_dict(row) - address_json["ens_name"] = ens_mapping.get(address_json["address"]) - address_list.append(address_json) - - return {"data": address_list}, 200 - - -@explorer_namespace.route("/v1/explorer/chart-data/daily") -class ExplorerChartDataDaily(Resource): - @cache.cached(timeout=3600, query_string=True) - def get(self): - metrics_arg = flask.request.args.get("metrics", "") - metrics_list = [metric.strip() for metric in metrics_arg.split(",") if metric.strip()] - - if not metrics_list: - return {"error": "No metrics provided."}, 400 - - raw_start_date = flask.request.args.get("start_date") - raw_end_date = flask.request.args.get("end_date") - - if raw_start_date is None: - start_date = date(1900, 1, 1) - else: - try: - start_date = datetime.strptime(raw_start_date, "%Y-%m-%d").date() - except ValueError: - return {"error": "Invalid start_date format. 
Expected format: YYYY-MM-DD."}, 400 - - if raw_end_date is None: - end_date = date.today() - timedelta(days=1) - else: - try: - end_date = datetime.strptime(raw_end_date, "%Y-%m-%d").date() - except ValueError: - return {"error": "Invalid end_date format. Expected format: YYYY-MM-DD."}, 400 - - if end_date < start_date: - return {"error": "end_date should not be earlier than start_date."}, 400 - - tables_to_query = {} - for metric in metrics_list: - if "." not in metric or len(metric.split(".")) != 2: - return {"error": f"Invalid metric: {metric}."}, 400 - table_name, field_name = metric.split(".") - - if table_name not in tables_to_query: - tables_to_query[table_name] = [] - - tables_to_query[table_name].append(field_name) - - data_list = {} - for table_name, fields in tables_to_query.items(): - if table_name == "transaction": - table = DailyTransactionsStats - elif table_name == "address": - table = DailyAddressesStats - elif table_name == "block": - table = DailyBlocksStats - elif table_name == "token": - table = DailyTokensStats - else: - return {"error": f"Unknown table name in metric: {metrics_list}."}, 400 - - for field in fields: - if not hasattr(table, field): - return {"error": f'Unknown field "{field}" in table "{table_name}".'}, 400 - - query = db.session.query( - getattr(table, "block_date"), - *(getattr(table, field) for field in fields), - ).filter( - and_( - table.block_date >= start_date, - table.block_date <= end_date, - ) - ) - - for record in query: - block_date = record[0].isoformat() - - if block_date not in data_list: - data_list[block_date] = {"date": block_date} - - for i, field in enumerate(fields): - value = record[i + 1] - - field_type = getattr(table, field).type - if isinstance(field_type, Numeric): - value = float(value) if value is not None else 0 - - data_list[block_date]["{}.{}".format(table_name, field)] = value or 0 - sorted_data = sorted(list(data_list.values()), key=lambda x: x["date"]) - results = {"data": sorted_data} - 
return results, 200 - - -def limit_address(value): - if value is None: - return None - if not isinstance(value, str): - raise ValueError("Error! Invalid contract address format, the value must be a string.") - - if len(value) != 42: - raise ValueError("Error! Invalid contract address format, the value must be 42 characters long.") - - if not value.startswith("0x"): - raise ValueError("Error! Invalid contract address format, The value must start with '0x'.") - - if not all(c in "0123456789abcdefABCDEF" for c in value[2:]): - raise ValueError( - "Error! Invalid contract address format, The address must contain only hexadecimal characters." - ) - - return value - - -parser = reqparse.RequestParser() -parser.add_argument( - "startblock", - type=int, - default=0, - help="The integer block number to start searching for transactions", -) -parser.add_argument( - "endblock", - type=int, - default=4999, - help="The integer block number to stop searching for transactions", -) - -parser.add_argument( - "startdate", - type=lambda x: datetime.strptime(x, "%Y-%m-%d"), - required=False, - help="Start date in YYYY-MM-DD format", -) -parser.add_argument( - "enddate", - type=lambda x: datetime.strptime(x, "%Y-%m-%d"), - required=False, - help="End date in YYYY-MM-DD format", -) - -parser.add_argument("filtertype", choices=("date", "block"), default=None) -parser.add_argument("address", type=lambda x: limit_address(x), default=None) -parser.add_argument("contractaddress", type=lambda x: limit_address(x), default=None) - - -def get_block_number_range(): - args = parser.parse_args() - filter_type = args.get("filtertype") - if filter_type == "date": - start_date = args.get("startdate") - end_date = args.get("enddate") - if not start_date or not end_date: - raise APIError("Error date", code=400) - - start_timestamp = datetime.utcfromtimestamp(datetime.combine(start_date, time.min).timestamp()) - end_timestamp = datetime.utcfromtimestamp(datetime.combine(end_date, time.max).timestamp()) - 
- start_block = Blocks.query.filter(Blocks.timestamp >= start_timestamp).order_by(Blocks.timestamp.asc()).first() - end_block = Blocks.query.filter(Blocks.timestamp <= end_timestamp).order_by(Blocks.timestamp.desc()).first() - - start_block_number = start_block.number if start_block else 0 - end_block_number = end_block.number if end_block else 0 - - else: - start_block_number = args.get("startblock") - end_block_number = args.get("endblock") - return start_block_number, end_block_number - - -def response_csv(data, filename, header): - si = io.StringIO() - cw = csv.DictWriter(si, fieldnames=header) - - if header: - cw.writeheader() - - cw.writerows(data) - output = Response(si.getvalue(), mimetype="text/csv") - output.headers["Content-Disposition"] = "attachment; filename={}.csv".format(filename) - output.headers["Content-type"] = "text/csv; charset=utf-8" - - return output - - -@explorer_namespace.route("/v1/explorer/export/transactions/
") -class ExplorerExportTransactions(Resource): - def get(self, address): - if not address or is_eth_address(address) is False: - raise APIError("Error Wallet Address", code=400) - address = address.lower() - address_bytes = hex_str_to_bytes(address) - - start_block_number, end_block_number = get_block_number_range() - - transactions = get_transactions_by_condition( - filter_condition=and_( - Transactions.block_number >= start_block_number, - Transactions.block_number <= end_block_number, - or_( - Transactions.from_address == address_bytes, - Transactions.to_address == address_bytes, - ), - ), - limit=5000, - ) - - header = [ - "blockNumber", - "timeStamp", - "hash", - "nonce", - "blockHash", - "transactionIndex", - "from", - "to", - "value", - "gas", - "gasPrice", - "isError", - "receiptStatus", - "contractAddress", - "cumulativeGasUsed", - "gasUsed", - "methodId", - ] - result = [ - { - "blockNumber": str(transaction.block_number), - "timeStamp": transaction.block_timestamp.strftime("%s"), - "hash": bytes_to_hex_str(transaction.hash), - "nonce": str(transaction.nonce), - "blockHash": bytes_to_hex_str(transaction.block_hash), - "transactionIndex": str(transaction.transaction_index), - "from": bytes_to_hex_str(transaction.from_address), - "to": bytes_to_hex_str(transaction.to_address), - "value": str(transaction.value), - "gas": str(transaction.gas), - "gasPrice": str(transaction.gas_price), - "isError": "0" if transaction.receipt_status == 1 else "1", - "receiptStatus": str(transaction.receipt_status), - "contractAddress": transaction.receipt_contract_address, - "cumulativeGasUsed": str(transaction.receipt_cumulative_gas_used), - "gasUsed": str(transaction.receipt_gas_used), - "methodId": bytes_to_hex_str(transaction.input)[0:10], - } - for transaction in transactions - ] - return response_csv( - result, - "transactions-{}-{}".format(address, datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - 
-@explorer_namespace.route("/v1/explorer/export/internal_transactions/
") -class ExplorerExportInternalTransactions(Resource): - def get(self, address): - if not address or is_eth_address(address) is False: - raise APIError("Error Wallet Address", code=400) - address = address.lower() - address_bytes = hex_str_to_bytes(address) - - start_block_number, end_block_number = get_block_number_range() - - internal_transactions = get_internal_transactions_by_condition( - filter_condition=and_( - ContractInternalTransactions.block_number >= start_block_number, - ContractInternalTransactions.block_number <= end_block_number, - or_( - ContractInternalTransactions.from_address == address_bytes, - ContractInternalTransactions.to_address == address_bytes, - ), - ), - limit=5000, - ) - header = [ - "blockNumber", - "timeStamp", - "hash", - "from", - "to", - "value", - "contractAddress", - "type", - "gas", - "traceId", - "isError", - "errCode", - ] - result = [ - { - "blockNumber": str(internal_transaction.block_number), - "timeStamp": internal_transaction.block_timestamp.strftime("%s"), - "hash": bytes_to_hex_str(internal_transaction.transaction_hash), - "from": bytes_to_hex_str(internal_transaction.from_address), - "to": bytes_to_hex_str(internal_transaction.to_address), - "value": str(internal_transaction.value), - "contractAddress": ( - bytes_to_hex_str(internal_transaction.to_address) - if internal_transaction.trace_type in ["create", "create2"] - else "" - ), - "type": internal_transaction.trace_type, - "gas": str(internal_transaction.gas), - "traceId": internal_transaction.trace_id, - "isError": "1" if internal_transaction.error == 0 else "0", - "errCode": internal_transaction.error, - } - for internal_transaction in internal_transactions - ] - return response_csv( - result, - "transactions-{}-{}".format(address, datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - -token_relationships = { - "ERC20": { - "TokenTable": Tokens, - "TokenTransferTable": ERC20TokenTransfers, - "TokenHoldersTable": CurrentTokenBalances, - }, - "ERC721": { - 
"TokenTable": Tokens, - "TokenTransferTable": ERC721TokenTransfers, - "TokenHoldersTable": CurrentTokenBalances, - }, - "ERC1155": { - "TokenTable": Tokens, - "TokenTransferTable": ERC1155TokenTransfers, - "TokenHoldersTable": CurrentTokenBalances, - }, -} - - -def token_transfers(contract_address, address, start_block_number, end_block_number, token_type): - TokenTable = token_relationships[token_type]["TokenTable"] - TokenTransferTable = token_relationships[token_type]["TokenTransferTable"] - condition = True - if contract_address: - contract_address = contract_address.lower() - contract_address_bytes = hex_str_to_bytes(contract_address) - - condition = and_(condition, TokenTransferTable.token_address == contract_address_bytes) - if address: - address = address.lower() - address_bytes = hex_str_to_bytes(address) - - condition = and_( - condition, - or_( - TokenTransferTable.from_address == address_bytes, - TokenTransferTable.to_address == address_bytes, - ), - ) - if address is None and contract_address is None: - raise APIError("Error address", code=400) - - transfers = ( - TokenTransferTable.query.filter( - and_( - condition, - TokenTransferTable.block_number >= start_block_number, - TokenTransferTable.block_number <= end_block_number, - ) - ) - .join( - Transactions, - TokenTransferTable.transaction_hash == Transactions.hash, - ) - .add_columns( - Transactions.nonce, - Transactions.gas, - Transactions.gas_price, - Transactions.receipt_gas_used, - Transactions.receipt_cumulative_gas_used, - Transactions.transaction_index, - Transactions.input, - ) - .order_by(TokenTransferTable.block_number.asc()) - .limit(5000) - ) - - token_addresses = set([transfer.token_address for transfer, _, _, _, _, _, _, _ in transfers]) - - tokens = TokenTable.query.filter(TokenTable.address.in_(token_addresses)).all() - token_dict = {token.address: token for token in tokens} - - result = [] - for ( - transfer, - nonce, - gas, - gas_price, - receipt_gas_used, - 
receipt_cumulative_gas_used, - transaction_index, - input, - ) in transfers: - transfer_data = { - "blockNumber": str(transfer.block_number), - "timeStamp": transfer.block_timestamp.strftime("%s"), - "hash": bytes_to_hex_str(transfer.transaction_hash), - "nonce": str(nonce), - "blockHash": bytes_to_hex_str(transfer.block_hash), - "contractAddress": bytes_to_hex_str(transfer.token_address), - "from": bytes_to_hex_str(transfer.from_address), - "to": bytes_to_hex_str(transfer.to_address), - "tokenName": token_dict.get(transfer.token_address).name, - "tokenSymbol": token_dict.get(transfer.token_address).symbol, - "transactionIndex": str(transaction_index), - "gas": str(gas), - "gasPrice": str(gas_price), - "gasUsed": str(receipt_gas_used), - "cumulativeGasUsed": str(receipt_cumulative_gas_used), - # 'input': 'deprecated', // TODO - # 'confirmations': str(transaction.confirmations), // TODO - } - if token_type == "ERC20": - transfer_data["value"] = str(transfer.value) - transfer_data["tokenDecimal"] = str(token_dict.get(transfer.token_address).decimals) - elif token_type == "ERC721": - transfer_data["tokenID"] = str(transfer.token_id) - elif token_type == "ERC1155": - transfer_data["tokenValue"] = str(transfer.value) - transfer_data["tokenID"] = str(transfer.token_id) - - result.append(transfer_data) - return result - - -def token_holder_list(contract_address, token_type): - contract_address = contract_address.lower() - - TokenHoldersTable = token_relationships[token_type]["TokenHoldersTable"] - - token = get_token_by_address(contract_address) - if token is None: - return [] - - token_holders = get_token_holders( - token_address=contract_address, - model=TokenHoldersTable, - columns=["wallet_address", "balance_of"], - limit=10000, - ) - - result = [ - { - "TokenHolderAddress": token_holder.wallet_address, - "TokenHolderQuantity": str(token_holder.balance_of), - } - for token_holder in token_holders - ] - - return result - - 
-@explorer_namespace.route("/v1/explorer/export/token_transfers") -class ExplorerExportTokenTransfer(Resource): - def get(self): - start_block_number, end_block_number = get_block_number_range() - args = parser.parse_args() - header = [ - "blockNumber", - "timeStamp", - "hash", - "nonce", - "blockHash", - "contractAddress", - "from", - "to", - "tokenName", - "tokenSymbol", - "transactionIndex", - "gas", - "gasPrice", - "gasUsed", - "cumulativeGasUsed", - "value", - "tokenDecimal", - ] - result = token_transfers( - args.get("contractaddress"), - args.get("address"), - start_block_number, - end_block_number, - "ERC20", - ) - - return response_csv( - result, - "erc20_token_transfers-{}".format(datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - -@explorer_namespace.route("/v1/explorer/export/nft_token_transfers") -class ExplorerExportNFTTokenTransfer(Resource): - def get(self): - start_block_number, end_block_number = get_block_number_range() - args = parser.parse_args() - header = [ - "blockNumber", - "timeStamp", - "hash", - "nonce", - "blockHash", - "contractAddress", - "from", - "to", - "tokenName", - "tokenSymbol", - "transactionIndex", - "gas", - "gasPrice", - "gasUsed", - "cumulativeGasUsed", - "tokenID", - ] - - result = token_transfers( - args.get("contractaddress"), - args.get("address"), - start_block_number, - end_block_number, - "ERC721", - ) - return response_csv( - result, - "erc721_token_transfers-{}".format(datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - -@explorer_namespace.route("/v1/explorer/export/nft1155_token_transfers") -class ExplorerExportNFT1155TokenTransfer(Resource): - def get(self): - start_block_number, end_block_number = get_block_number_range() - args = parser.parse_args() - header = [ - "blockNumber", - "timeStamp", - "hash", - "nonce", - "blockHash", - "contractAddress", - "from", - "to", - "tokenName", - "tokenSymbol", - "transactionIndex", - "gas", - "gasPrice", - "gasUsed", - "cumulativeGasUsed", - 
"tokenValue", - "tokenID", - ] - - result = token_transfers( - args.get("contractaddress"), - args.get("address"), - start_block_number, - end_block_number, - "ERC1155", - ) - return response_csv( - result, - "erc1155_token_transfers-{}".format(datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - -@explorer_namespace.route("/v1/explorer/export/token_holders/") -class ExplorerExportTokenHolders(Resource): - def get(self, contract_address): - if not contract_address or (contract_address and len(contract_address) != 42): - raise APIError("Error Wallet Address", code=400) - header = ["TokenHolderAddress", "TokenHolderQuantity"] - result = token_holder_list(contract_address, "ERC20") - return response_csv( - result, - "erc20_token_holders-{}".format(datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - -@explorer_namespace.route("/v1/explorer/export/nft_token_holders/") -class ExplorerExportNFTTokenHolders(Resource): - def get(self, contract_address): - if not contract_address or (contract_address and len(contract_address) != 42): - raise APIError("Error Wallet Address", code=400) - header = ["TokenHolderAddress", "TokenHolderQuantity"] - result = token_holder_list(contract_address, "ERC721") - return response_csv( - result, - "erc721_token_holders-{}".format(datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) - - -@explorer_namespace.route("/v1/explorer/export/nft1155_token_holders/") -class ExplorerExportNFT1155TokenHolders(Resource): - def get(self, contract_address): - if not contract_address or (contract_address and len(contract_address) != 42): - raise APIError("Error Wallet Address", code=400) - header = ["TokenHolderAddress", "TokenHolderQuantity"] - result = token_holder_list(contract_address, "ERC1155") - return response_csv( - result, - "erc1155_token_holders-{}".format(datetime.now().strftime("%Y%m%d%H%M%S")), - header, - ) diff --git a/hemera/api/app/l2_explorer/__init__.py b/hemera/api/app/l2_explorer/__init__.py deleted file mode 100644 
index b4aaddaf5..000000000 --- a/hemera/api/app/l2_explorer/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -from flask_restx.namespace import Namespace - -l2_explorer_namespace = Namespace( - "Blockchain Explorer L2 Endpoint", - path="/", - description="Blockchain Explorer L2 API", -) diff --git a/hemera/api/app/l2_explorer/routes.py b/hemera/api/app/l2_explorer/routes.py deleted file mode 100644 index 70fa4c460..000000000 --- a/hemera/api/app/l2_explorer/routes.py +++ /dev/null @@ -1,332 +0,0 @@ -import binascii -import re -from operator import or_ - -from flask import request -from flask_restx import Resource -from sqlalchemy import and_, func - -from hemera.api.app.cache import cache -from hemera.api.app.l2_explorer import l2_explorer_namespace -from hemera.common.models import db as postgres_db -from hemera.common.models.tokens import Tokens -from hemera.common.utils.bridge_utils import BridgeTransactionParser -from hemera.common.utils.config import get_config -from hemera.common.utils.exception_control import APIError -from hemera.common.utils.format_utils import format_to_dict -from hemera.common.utils.web3_utils import is_eth_address -from hemera_udf.bridge.models.bridge import ( - BridgeTokens, - L1ToL2BridgeTransactions, - L2ToL1BridgeTransactions, - OpBedrockStateBatches, -) - -app_config = get_config() - -PAGE_SIZE = 25 -MAX_TRANSACTION = 500000 -MAX_TRANSACTION_WITH_CONDITION = 10000 -MAX_INTERNAL_TRANSACTION = 10000 -MAX_TOKEN_TRANSFER = 10000 - -bridge_transaction_parser = BridgeTransactionParser.init_from_config(get_config()) - - -def get_deposit_count_by_address(address): - address_bin = binascii.unhexlify(address[2:]) - recently_txn_count = ( - postgres_db.session.query(L1ToL2BridgeTransactions.l1_transaction_hash) - .filter( - and_( - L1ToL2BridgeTransactions.to_address == address_bin, - ) - ) - .count() - ) - total_count = recently_txn_count - return total_count - - -def 
get_withdraw_count_by_address(address): - address_bin = binascii.unhexlify(address[2:]) - recently_txn_count = ( - postgres_db.session.query(L2ToL1BridgeTransactions.l2_transaction_hash) - .filter( - L2ToL1BridgeTransactions.from_address == address_bin, - ) - .count() - ) - total_count = recently_txn_count - return total_count - - -@l2_explorer_namespace.route("/v2/explorer/l1_to_l2_transactions") -@l2_explorer_namespace.route("/v1/explorer/l1_to_l2_transactions") -class ExplorerL1ToL2BridgeTransactions(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self): - page_index = int(request.args.get("page", 1)) - page_size = int(request.args.get("size", PAGE_SIZE)) - address = request.args.get("address", None) - type = request.args.get("type", None) - token_address = request.args.get("token_address", None) - - if page_index <= 0 or page_size <= 0: - return {"error": "Invalid page or size"}, 400 - - if address and is_eth_address(address) is False: - return {"error": "Invalid wallet address"}, 400 - - if page_index * page_size > MAX_INTERNAL_TRANSACTION: - return {"error": f"Showing the last {MAX_INTERNAL_TRANSACTION} records only"}, 400 - - query = postgres_db.session.query(L1ToL2BridgeTransactions).filter( - L1ToL2BridgeTransactions.l1_block_number != None - ) - - if type is not None and type.isdigit(): - query = query.filter(L1ToL2BridgeTransactions._type == int(type)) - - if address: - address_bin = binascii.unhexlify(address[2:]) - query = query.filter(L1ToL2BridgeTransactions.to_address == address_bin) - - if app_config.chain in ["taiko"] or app_config.l2_config.rollup_type == "arbitrum": - query = query.outerjoin( - BridgeTokens, - or_( - L1ToL2BridgeTransactions.l1_token_address == BridgeTokens.l1_token_address, - L1ToL2BridgeTransactions.l2_token_address == BridgeTokens.l2_token_address, - ), - ).with_entities( - L1ToL2BridgeTransactions.l1_block_number, - L1ToL2BridgeTransactions.l1_block_timestamp, - 
L1ToL2BridgeTransactions.l1_transaction_hash, - L1ToL2BridgeTransactions.l2_block_number, - L1ToL2BridgeTransactions.l2_block_timestamp, - L1ToL2BridgeTransactions.l2_transaction_hash, - L1ToL2BridgeTransactions.amount, - L1ToL2BridgeTransactions.from_address, - L1ToL2BridgeTransactions.to_address, - func.coalesce( - BridgeTokens.l1_token_address, - L1ToL2BridgeTransactions.l1_token_address, - ).label("l1_token_address"), - func.coalesce( - BridgeTokens.l2_token_address, - L1ToL2BridgeTransactions.l2_token_address, - ).label("l2_token_address"), - L1ToL2BridgeTransactions.extra_info, - L1ToL2BridgeTransactions._type, - L1ToL2BridgeTransactions.index, - L1ToL2BridgeTransactions.l1_block_hash, - L1ToL2BridgeTransactions.l2_block_hash, - L1ToL2BridgeTransactions.l1_from_address, - L1ToL2BridgeTransactions.l2_from_address, - ) - else: - query = query.with_entities( - L1ToL2BridgeTransactions.l1_block_number, - L1ToL2BridgeTransactions.l1_block_timestamp, - L1ToL2BridgeTransactions.l1_transaction_hash, - L1ToL2BridgeTransactions.l2_block_number, - L1ToL2BridgeTransactions.l2_block_timestamp, - L1ToL2BridgeTransactions.l2_transaction_hash, - L1ToL2BridgeTransactions.amount, - L1ToL2BridgeTransactions.from_address, - L1ToL2BridgeTransactions.to_address, - L1ToL2BridgeTransactions.l1_token_address, - L1ToL2BridgeTransactions.l2_token_address, - L1ToL2BridgeTransactions.extra_info, - L1ToL2BridgeTransactions._type, - L1ToL2BridgeTransactions.index, - L1ToL2BridgeTransactions.l1_block_hash, - L1ToL2BridgeTransactions.l2_block_hash, - L1ToL2BridgeTransactions.l1_from_address, - L1ToL2BridgeTransactions.l2_from_address, - ) - - if token_address: - if not re.match(r"^0x[a-fA-F0-9]{40}$", token_address): - raise APIError("Invalid wallet address", code=400) - if token_address.lower() == "0xdeaddeaddeaddeaddeaddeaddeaddeaddead1111": - query = query.filter(L1ToL2BridgeTransactions.l2_token_address == None) - else: - token_address_bin = binascii.unhexlify(token_address[2:]) - query 
= query.filter(L1ToL2BridgeTransactions.l2_token_address == token_address_bin) - - transactions = ( - query.order_by(L1ToL2BridgeTransactions.l1_block_number.desc()) - .limit(page_size) - .offset((page_index - 1) * page_size) - .all() - ) - - token_addresses = {transaction.l2_token_address for transaction in transactions} - tokens = postgres_db.session.query(Tokens).filter(Tokens.address.in_(token_addresses)).all() - bridge_transaction_parser.complete_format_tokens(tokens) - token_info_dict = {token.address: token for token in tokens} - - transaction_list = [] - for transaction in transactions: - transaction_list.append( - bridge_transaction_parser.parse_bridge_l1_to_l2_transaction( - format_to_dict(transaction), - format_to_dict(token_info_dict.get(transaction.l2_token_address)), - ) - ) - if token_address is None and address is None and type is None: - total_records = L1ToL2BridgeTransactions.query.count() - elif token_address is None and address and type is not None: - total_records = get_deposit_count_by_address(address) - else: - total_records = query.count() - - response = { - "data": transaction_list, - "total": total_records, - "max_display": min(total_records, MAX_INTERNAL_TRANSACTION), - "page": page_index, - "size": page_size, - } - return response, 200 - - -@l2_explorer_namespace.route("/v2/explorer/l2_to_l1_transactions") -@l2_explorer_namespace.route("/v1/explorer/l2_to_l1_transactions") -class ExplorerL2oL1Transactions(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self): - page_index = int(request.args.get("page", 1)) - page_size = int(request.args.get("size", PAGE_SIZE)) - address = request.args.get("address", None) - type = request.args.get("type", None) - token_address = request.args.get("token_address", None) - - if page_index <= 0 or page_size <= 0: - return {"error": "Invalid page or size"}, 400 - - if page_index * page_size > MAX_INTERNAL_TRANSACTION: - return {"error": f"Showing the last {MAX_INTERNAL_TRANSACTION} 
records only"}, 400 - - query = postgres_db.session.query(L2ToL1BridgeTransactions).filter( - L2ToL1BridgeTransactions.l2_block_number != None - ) - - if address: - address_bin = binascii.unhexlify(address[2:]) - query = query.filter(L2ToL1BridgeTransactions.to_address == address_bin) - if app_config.chain == "taiko" or app_config.chain == "arbitrum": - query = query.outerjoin( - BridgeTokens, - or_( - L2ToL1BridgeTransactions.l1_token_address == BridgeTokens.l1_token_address, - L2ToL1BridgeTransactions.l2_token_address == BridgeTokens.l2_token_address, - ), - ).with_entities( - L2ToL1BridgeTransactions.l1_block_number, - L2ToL1BridgeTransactions.l1_block_timestamp, - L2ToL1BridgeTransactions.l1_transaction_hash, - L2ToL1BridgeTransactions.l2_block_number, - L2ToL1BridgeTransactions.l2_block_timestamp, - L2ToL1BridgeTransactions.l2_transaction_hash, - L2ToL1BridgeTransactions.amount, - L2ToL1BridgeTransactions.from_address, - L2ToL1BridgeTransactions.to_address, - func.coalesce( - BridgeTokens.l1_token_address, - L2ToL1BridgeTransactions.l1_token_address, - ).label("l1_token_address"), - func.coalesce( - BridgeTokens.l2_token_address, - L2ToL1BridgeTransactions.l2_token_address, - ).label("l2_token_address"), - L2ToL1BridgeTransactions.extra_info, - L2ToL1BridgeTransactions.l1_proven_transaction_hash, - L2ToL1BridgeTransactions.l1_proven_block_number, - L2ToL1BridgeTransactions.l1_proven_block_timestamp, - L2ToL1BridgeTransactions._type, - L2ToL1BridgeTransactions.index, - L2ToL1BridgeTransactions.l1_block_hash, - L2ToL1BridgeTransactions.l2_block_hash, - L2ToL1BridgeTransactions.l1_from_address, - L2ToL1BridgeTransactions.l2_from_address, - ) - else: - query = query.with_entities( - L2ToL1BridgeTransactions.l1_block_number, - L2ToL1BridgeTransactions.l1_block_timestamp, - L2ToL1BridgeTransactions.l1_transaction_hash, - L2ToL1BridgeTransactions.l2_block_number, - L2ToL1BridgeTransactions.l2_block_timestamp, - L2ToL1BridgeTransactions.l2_transaction_hash, - 
L2ToL1BridgeTransactions.amount, - L2ToL1BridgeTransactions.from_address, - L2ToL1BridgeTransactions.to_address, - L2ToL1BridgeTransactions.l1_token_address, - L2ToL1BridgeTransactions.l2_token_address, - L2ToL1BridgeTransactions.extra_info, - L2ToL1BridgeTransactions.l1_proven_transaction_hash, - L2ToL1BridgeTransactions.l1_proven_block_number, - L2ToL1BridgeTransactions.l1_proven_block_timestamp, - L2ToL1BridgeTransactions._type, - L2ToL1BridgeTransactions.index, - L2ToL1BridgeTransactions.l1_block_hash, - L2ToL1BridgeTransactions.l2_block_hash, - L2ToL1BridgeTransactions.l1_from_address, - L2ToL1BridgeTransactions.l2_from_address, - ) - - if token_address: - if not re.match(r"^0x[a-fA-F0-9]{40}$", token_address): - raise APIError("Invalid wallet address", code=400) - if token_address.lower() == "0xdeaddeaddeaddeaddeaddeaddeaddeaddead1111": - query = query.filter(L2ToL1BridgeTransactions.l2_token_address == None) - else: - token_address_bin = binascii.unhexlify(token_address[2:]) - query = query.filter(L2ToL1BridgeTransactions.l2_token_address == token_address_bin) - - transactions = ( - query.order_by(L2ToL1BridgeTransactions.l2_block_number.desc()) - .limit(page_size) - .offset((page_index - 1) * page_size) - .all() - ) - - token_addresses = {transaction.l2_token_address for transaction in transactions} - tokens = postgres_db.session.query(Tokens).filter(Tokens.address.in_(token_addresses)).all() - bridge_transaction_parser.complete_format_tokens(tokens) - token_info_dict = {token.address: token for token in tokens} - - transaction_list = [] - - finalized_block_number = ( - postgres_db.session.query(OpBedrockStateBatches.end_block_number) - .order_by(OpBedrockStateBatches.batch_index.desc()) - .first() - ) - finalized_block_number = finalized_block_number[0] if finalized_block_number else None - for transaction in transactions: - transaction_list.append( - bridge_transaction_parser.parse_bridge_l2_to_l1_transaction( - format_to_dict(transaction), - 
format_to_dict(token_info_dict.get(transaction.l2_token_address)), - finalized_block_number, - ) - ) - - if token_address is None and address is None and type is None: - total_records = L2ToL1BridgeTransactions.query.count() - elif token_address is None and address and type is not None: - total_records = get_withdraw_count_by_address(address) - else: - total_records = query.count() - response = { - "data": transaction_list, - "total": total_records, - "max_display": min(total_records, MAX_INTERNAL_TRANSACTION), - "page": page_index, - "size": page_size, - } - return response, 200 diff --git a/hemera/api/app/limiter.py b/hemera/api/app/limiter.py deleted file mode 100644 index 3039dec49..000000000 --- a/hemera/api/app/limiter.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -from flask import request -from flask_limiter import Limiter - - -def get_real_ip() -> str: - remote_address = request.remote_addr - forward_address = request.headers.get("gateway-forwarded-ip") - # current_app.logger.info(f"remote_address: {remote_address}") - # current_app.logger.info(f"gateway-forwarded-ip: {forward_address}") - # if forward_address: - # remote_address = forward_address - # current_app.logger.info(f"remote_address: {remote_address}") - return forward_address or remote_address - - -# https://flask-limiter.readthedocs.io/en/stable/index.html -limiter = Limiter( - key_func=get_real_ip, - default_limits=["1800 per hour", "180 per minute"], - storage_uri="memory://", -) diff --git a/hemera/api/app/main.py b/hemera/api/app/main.py deleted file mode 100644 index 7ebcdd37e..000000000 --- a/hemera/api/app/main.py +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -import flask -from flask import Flask, request -from flask_cors import CORS - -from hemera.api.app.cache import cache, redis_db -from hemera.api.app.limiter import limiter -from hemera.common.models import db -from hemera.common.utils.config import get_config -from 
hemera.common.utils.exception_control import APIError - -# from app.serializing import ma - -config = get_config() - -import logging -import os - -# logging.getLogger("sqlalchemy.pool").setLevel(logging.DEBUG) - -app = Flask(__name__) -# Get the log level from the environment variable, default to WARNING if not set -log_level = os.environ.get("LOG_LEVEL", "INFO").upper() - -# Convert the string log level to the corresponding numeric value -numeric_level = getattr(logging, log_level, None) -if not isinstance(numeric_level, int): - raise ValueError("Invalid log level: %s" % log_level) - -app.logger.setLevel(numeric_level) -# Init database -app.config["SQLALCHEMY_DATABASE_URI"] = config.db_read_sql_alchemy_database_config.get_sql_alchemy_uri() -app.config["SQLALCHEMY_BINDS"] = { - "common": config.db_common_sql_alchemy_database_config.get_sql_alchemy_uri(), - "write": config.db_write_sql_alchemy_database_config.get_sql_alchemy_uri(), -} -app.config.update( - { - "MAX_CONTENT_LENGTH": 1024 * 1024 * 1024, - "SQLALCHEMY_TRACK_MODIFICATIONS": False, - "SQLALCHEMY_ENGINE_OPTIONS": { - "pool_size": 100, - "max_overflow": 100, - }, - } -) - -db.init_app(app) - -# Add API Namespace -from hemera.api.app.api import api - -api.init_app(app) - -# Init cache -cache.init_app(app, config.cache_config.get_cache_config(redis_db)) - -# Rate limit -limiter.init_app(app) - -# ma.init_app(app) -CORS(app) -# Note: A secret key is included in the sample so that it works. -# If you use this code in your application, replace this with a truly secret -# key. See https://flask.palletsprojects.com/quickstart/#sessions. 
-app.secret_key = "a330c710ea827a698cf64dba73d99080b1bc38aaeedb37967ed840679a6a11c7" - - -@app.errorhandler(APIError) -def handle_exception(err): - """Return custom JSON when APIError or its children are raised""" - app.logger.error(f"New Error: {err.code}: {err.message}") - return flask.jsonify(err.to_dict()), err.code - - -@api.errorhandler(APIError) -def handle_exception(err): - """Return custom JSON when APIError or its children are raised""" - app.logger.error(f"API Error: {err.code}: {err.message}") - return err.to_dict(), err.code - - -# @app.errorhandler(GrpcError) -# def handle_exception(err): -# """Return custom JSON when Grpc or its children are raised""" -# app.logger.error(f"Grpc Error: {err.code}: {err.message}") -# return err.to_dict(), err.code - - -@app.errorhandler(500) -def handle_exception(err): - """Return JSON instead of HTML for any other server error""" - app.logger.error(f"Unknown Exception: {str(err)}") - response = { - "code": 500, - "message": "Sorry, that error is on us, please contact support if this wasn't an accident", - } - return flask.jsonify(response), 500 - - -@app.errorhandler(429) -def ratelimit_handler(e): - response = {"code": 429, "message": f"ratelimit exceeded {e.description}"} - return flask.jsonify(response), 500 - - -def _build_cors_prelight_response(): - response = flask.make_response() - response.headers.add("Access-Control-Allow-Origin", "*") - response.headers.add("Access-Control-Allow-Headers", "*") - response.headers.add("Access-Control-Allow-Methods", "*") - return response - - -@app.before_request -def hook(): - if request.method == "OPTIONS": # CORS preflight - return _build_cors_prelight_response() - - -@app.after_request -def inject_identifying_headers(response): - if flask.session.get("user_id"): - response.headers["X-User-Id"] = flask.session.get("user_id") - return response diff --git a/hemera/api/app/user_operation/__init__.py b/hemera/api/app/user_operation/__init__.py deleted file mode 100644 index 
ed806a4b4..000000000 --- a/hemera/api/app/user_operation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- - -from flask_restx.namespace import Namespace - -user_operation_namespace = Namespace("User Operation Namespace", path="/", description="User Operation API") diff --git a/hemera/api/app/user_operation/routes.py b/hemera/api/app/user_operation/routes.py deleted file mode 100644 index a926c43ee..000000000 --- a/hemera/api/app/user_operation/routes.py +++ /dev/null @@ -1,343 +0,0 @@ -import re - -import flask -from flask_restx import Resource - -from hemera.api.app.cache import cache -from hemera.api.app.user_operation import user_operation_namespace -from hemera.api.app.utils.fill_info import fill_address_display_to_transactions, process_token_transfer -from hemera.api.app.utils.parse_utils import parse_log_with_transaction_input_list -from hemera.common.models import db -from hemera.common.models.erc20_token_transfers import ERC20TokenTransfers -from hemera.common.models.erc721_token_transfers import ERC721TokenTransfers -from hemera.common.models.erc1155_token_transfers import ERC1155TokenTransfers -from hemera.common.models.logs import Logs -from hemera.common.models.tokens import Tokens -from hemera.common.models.transactions import Transactions -from hemera.common.utils.config import get_config -from hemera.common.utils.db_utils import get_total_row_count -from hemera.common.utils.exception_control import APIError -from hemera.common.utils.format_utils import format_value_for_json, hex_str_to_bytes -from hemera_udf.user_ops.models.user_operation_results import UserOperationResult - -PAGE_SIZE = 25 -MAX_TRANSACTION = 500000 -MAX_TRANSACTION_WITH_CONDITION = 10000 -MAX_INTERNAL_TRANSACTION = 10000 -MAX_TOKEN_TRANSFER = 10000 - -app_config = get_config() - - -@user_operation_namespace.route("/v1/explorer/ops") -class ExplorerUserOperations(Resource): - @cache.cached(timeout=10, query_string=True) - def get(self): - 
page_index = int(flask.request.args.get("page", 1)) - page_size = int(flask.request.args.get("size", 25)) - - size = (page_index - 1) * page_size - start_item = size + 1 - if start_item > MAX_TRANSACTION: - return {"error": f"The requested data range exceeds the maximum({MAX_TRANSACTION}) allowed."}, 400 - - sender = flask.request.args.get("sender") - - condition = True - if sender: - if not re.match(r"^0x[a-fA-F0-9]{40}$", sender): - raise APIError("Invalid wallet address", code=400) - condition = UserOperationResult.sender == sender - - user_operation_results = ( - db.session.query(UserOperationResult) - .filter(condition) - .order_by( - UserOperationResult.block_number.desc(), - ) - .limit(page_size) - .offset(size) - .all() - ) - - if sender: - total_count = db.session.query(UserOperationResult).filter(condition).count() - else: - total_count = get_total_row_count("user_operations_results") - - if not user_operation_results: - raise APIError("There are not any user operations", code=400) - - user_operation_result_list = [] - for user_operation_result in user_operation_results: - user_operation_result_dict = {} - user_operation_result_dict["user_op_hash"] = user_operation_result.user_op_hash - user_operation_result_dict["block_timestamp"] = user_operation_result.block_timestamp - user_operation_result_dict["status"] = user_operation_result.status - user_operation_result_dict["sender"] = user_operation_result.sender - user_operation_result_dict["transactions_hash"] = user_operation_result.transactions_hash - user_operation_result_dict["block_number"] = user_operation_result.block_number - wei_amount = user_operation_result.actual_gas_cost - formatted_eth = format(wei_amount / 10**18, ".10f") - user_operation_result_dict["fee"] = formatted_eth - - user_operation_result_list.append( - {k: format_value_for_json(v) for k, v in user_operation_result_dict.items()} - ) - - return { - "data": user_operation_result_list, - "total": total_count, - "max_display": min( - 
MAX_TRANSACTION, - total_count, - ), - "page": page_index, - "size": page_size, - }, 200 - - -@user_operation_namespace.route("/v1/explorer/op/") -class ExplorerUserOperationDetails(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, hash): - # parameter validated - if not re.match(r"^0x[a-fA-F0-9]{64}$", hash): - raise APIError("Invalid user operation hash", code=400) - - bytes_hash = hex_str_to_bytes(hash) - - user_operation_result = db.session.query(UserOperationResult).get(bytes_hash) - if not user_operation_result: - raise APIError("Cannot find user operation with hash", code=400) - - user_operation_result_dict = {} - user_operation_result_dict["user_op_hash"] = user_operation_result.user_op_hash - user_operation_result_dict["sender"] = user_operation_result.sender - user_operation_result_dict["status"] = user_operation_result.status - user_operation_result_dict["block_timestamp"] = user_operation_result.block_timestamp - user_operation_result_dict["fee"] = format(user_operation_result.actual_gas_cost / 10**18, ".10f") - - user_operation_result_dict["gas_limit"] = ( - user_operation_result.call_gas_limit - + user_operation_result.verification_gas_limit - + user_operation_result.pre_verification_gas - ) - user_operation_result_dict["gas_used"] = user_operation_result.actual_gas_used - user_operation_result_dict["transactions_hash"] = user_operation_result.transactions_hash - user_operation_result_dict["block_number"] = user_operation_result.block_number - user_operation_result_dict["user_op_hash"] = user_operation_result.user_op_hash - user_operation_result_dict["entry_point"] = ( - "0x5ff137d4b0fdcd49dca30c7cf57e578a026d2789" # todo: maybe there will be some new contract address? 
- ) - user_operation_result_dict["call_gas_limit"] = user_operation_result.call_gas_limit - user_operation_result_dict["verification_gas_limit"] = user_operation_result.verification_gas_limit - user_operation_result_dict["pre_verification_gas"] = user_operation_result.pre_verification_gas - user_operation_result_dict["max_fee_per_gas"] = user_operation_result.max_fee_per_gas - user_operation_result_dict["max_priority_fee_per_gas"] = user_operation_result.max_priority_fee_per_gas - user_operation_result_dict["bundler"] = user_operation_result.bundler - user_operation_result_dict["paymaster"] = user_operation_result.paymaster - user_operation_result_dict["sponsor_type"] = ( - 1 if user_operation_result.paymaster != "0x0000000000000000000000000000000000000000" else 0 - ) # todo: add more types - - user_operation_result_dict["signature"] = user_operation_result.signature - user_operation_result_dict["nonce"] = str(user_operation_result.nonce) - user_operation_result_dict["call_data"] = user_operation_result.call_data - - result_json = {k: format_value_for_json(v) for k, v in user_operation_result_dict.items()} - return result_json, 200 - - -@user_operation_namespace.route("/v1/explorer/op//token-transfers") -class ExplorerUserOperationTokenTransfers(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, hash): - if not re.match(r"^0x[a-fA-F0-9]{64}$", hash): - raise APIError("Invalid user operation hash", code=400) - - bytes_hash = hex_str_to_bytes(hash) - - user_operation_result = ( - db.session.query(UserOperationResult) - .filter_by(user_op_hash=bytes_hash) - .with_entities( - UserOperationResult.transactions_hash, - UserOperationResult.start_log_index, - UserOperationResult.end_log_index, - ) - .first() - ) - - erc20_token_transfers = ( - db.session.query(ERC20TokenTransfers) - .filter(ERC20TokenTransfers.transaction_hash == user_operation_result.transactions_hash) - .filter( - (ERC20TokenTransfers.log_index > 
user_operation_result.start_log_index) - & (ERC20TokenTransfers.log_index < user_operation_result.end_log_index) - ) - .join( - Tokens, # not sure Erc20Tokens - ERC20TokenTransfers.token_address == Tokens.address, - ) - .add_columns( - Tokens.name, - Tokens.symbol, - Tokens.decimals, - Tokens.icon_url, - ) - .all() - ) - - erc721_token_transfers = ( - db.session.query(ERC721TokenTransfers) - .filter(ERC721TokenTransfers.transaction_hash == user_operation_result.transactions_hash) - .filter( - (ERC721TokenTransfers.log_index > user_operation_result.start_log_index) - & (ERC721TokenTransfers.log_index < user_operation_result.end_log_index) - ) - .join( - Tokens, # Erc721Tokens - ERC721TokenTransfers.token_address == Tokens.address, - ) - .add_columns( - Tokens.name, - Tokens.symbol, - ) - .all() - ) - - # ERC1155 - erc1155_token_transfers = ( - db.session.query(ERC1155TokenTransfers) - .filter(ERC1155TokenTransfers.transaction_hash == user_operation_result.transactions_hash) - .filter( - (ERC1155TokenTransfers.log_index > user_operation_result.start_log_index) - & (ERC1155TokenTransfers.log_index < user_operation_result.end_log_index) - ) - .join( - Tokens, # Erc1155Tokens - ERC1155TokenTransfers.token_address == Tokens.address, - ) - .add_columns( - Tokens.name, - Tokens.symbol, - ) - .all() - ) - - token_transfer_list = [] - token_transfer_list.extend(process_token_transfer(erc20_token_transfers, "tokentxns")) - token_transfer_list.extend(process_token_transfer(erc721_token_transfers, "tokentxns-nft")) - token_transfer_list.extend(process_token_transfer(erc1155_token_transfers, "tokentxns-nft1155")) - fill_address_display_to_transactions(token_transfer_list) - return { - "total": len(token_transfer_list), - "data": token_transfer_list, - }, 200 - - -@user_operation_namespace.route("/v1/explorer/op//logs") -class ExplorerUserOperationLogs(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, hash): - if not re.match(r"^0x[a-fA-F0-9]{64}$", hash): 
- raise APIError("Invalid user operation hash", code=400) - - bytes_hash = hex_str_to_bytes(hash) - - user_operation_result = ( - db.session.query(UserOperationResult) - .filter_by(user_op_hash=bytes_hash) - .with_entities( - UserOperationResult.transactions_hash, - UserOperationResult.start_log_index, - UserOperationResult.end_log_index, - ) - .first() - ) - - logs = ( - db.session.query(Logs) - .filter(Logs.transaction_hash == user_operation_result.transactions_hash) - .filter( - (Logs.log_index > user_operation_result.start_log_index) - & (Logs.log_index < user_operation_result.end_log_index) - ) - .join(Transactions, Logs.transaction_hash == Transactions.hash) - .add_columns(Transactions.input) - .all() - ) - log_list = parse_log_with_transaction_input_list(logs) - - return {"total": len(log_list), "data": log_list}, 200 - - -@user_operation_namespace.route("/v1/explorer/op//raw") -class ExplorerUserOperationRaw(Resource): - @cache.cached(timeout=60, query_string=True) - def get(self, hash): - if not re.match(r"^0x[a-fA-F0-9]{64}$", hash): - raise APIError("Invalid user operation hash", code=400) - bytes_hash = hex_str_to_bytes(hash) - - user_operation_result = db.session.query(UserOperationResult).get(bytes_hash) - if not user_operation_result: - raise APIError("Cannot find user operation with hash", code=400) - - user_operation_result_dict = {} - user_operation_result_dict["sender"] = user_operation_result.sender - user_operation_result_dict["nonce"] = str(user_operation_result.nonce) - user_operation_result_dict["init_code"] = user_operation_result.init_code - user_operation_result_dict["call_data"] = user_operation_result.call_data - user_operation_result_dict["call_gas_limit"] = user_operation_result.call_gas_limit - user_operation_result_dict["verification_gas_limit"] = user_operation_result.verification_gas_limit - user_operation_result_dict["pre_verification_gas"] = user_operation_result.pre_verification_gas - 
user_operation_result_dict["max_fee_per_gas"] = user_operation_result.max_fee_per_gas - user_operation_result_dict["max_priority_fee_per_gas"] = user_operation_result.max_priority_fee_per_gas - user_operation_result_dict["paymaster_and_data"] = user_operation_result.paymaster_and_data - user_operation_result_dict["signature"] = user_operation_result.signature - result_json = {k: format_value_for_json(v) for k, v in user_operation_result_dict.items()} - return result_json, 200 - - -@user_operation_namespace.route("/v1/explorer/transaction//ops") -class ExplorerTransactionOperation(Resource): - @cache.cached(timeout=360, query_string=True) - def get(self, txn_hash): - if not re.match(r"^0x[a-fA-F0-9]{64}$", txn_hash): - raise APIError("Invalid user operation hash", code=400) - - bytes_hash = hex_str_to_bytes(txn_hash) - - user_operation_result = db.session.query(UserOperationResult).filter_by(transactions_hash=bytes_hash) - if not user_operation_result: - raise APIError("Cannot find user operation with hash", code=400) - - user_operation_result_list = [] - for user_operation_result in user_operation_result: - user_operation_result_dict = {} - user_operation_result_dict["user_op_hash"] = user_operation_result.user_op_hash - user_operation_result_dict["block_timestamp"] = user_operation_result.block_timestamp - user_operation_result_dict["status"] = user_operation_result.status - user_operation_result_dict["sender"] = user_operation_result.sender - user_operation_result_dict["transactions_hash"] = user_operation_result.transactions_hash - user_operation_result_dict["block_number"] = user_operation_result.block_number - wei_amount = user_operation_result.actual_gas_cost - formatted_eth = format(wei_amount / 10**18, ".10f") - user_operation_result_dict["fee"] = formatted_eth - user_operation_result_dict["nonce"] = str(user_operation_result.nonce) - user_operation_result_dict["init_code"] = user_operation_result.init_code - user_operation_result_dict["call_data"] = 
user_operation_result.call_data - user_operation_result_dict["call_gas_limit"] = user_operation_result.call_gas_limit - user_operation_result_dict["verification_gas_limit"] = user_operation_result.verification_gas_limit - user_operation_result_dict["pre_verification_gas"] = user_operation_result.pre_verification_gas - user_operation_result_dict["max_fee_per_gas"] = user_operation_result.max_fee_per_gas - user_operation_result_dict["max_priority_fee_per_gas"] = user_operation_result.max_priority_fee_per_gas - user_operation_result_dict["paymaster_and_data"] = user_operation_result.paymaster_and_data - user_operation_result_dict["signature"] = user_operation_result.signature - - user_operation_result_list.append( - {k: format_value_for_json(v) for k, v in user_operation_result_dict.items()} - ) - return user_operation_result_list, 200 diff --git a/hemera/api/app/utils/fill_info.py b/hemera/api/app/utils/fill_info.py deleted file mode 100644 index f2ff4613b..000000000 --- a/hemera/api/app/utils/fill_info.py +++ /dev/null @@ -1,76 +0,0 @@ -from hemera.api.app.db_service.contracts import get_contracts_by_addresses -from hemera.api.app.db_service.wallet_addresses import get_address_display_mapping -from hemera.common.utils.format_utils import format_to_dict, hex_str_to_bytes - - -def fill_address_display_to_logs(log_list, all_address_list=None): - if not all_address_list: - all_address_list = [] - for log in log_list: - all_address_list.append(hex_str_to_bytes(log["address"])) - - address_map = get_address_display_mapping(all_address_list) - for log in log_list: - if log["address"] in address_map: - log["address_display_name"] = address_map[log["address"]] - - -def fill_is_contract_to_transactions(transaction_list: list[dict], bytea_address_list: list[bytes] = None): - if not bytea_address_list: - bytea_address_list = [] - for transaction in transaction_list: - bytea_address_list.append(hex_str_to_bytes(transaction["from_address"])) - 
bytea_address_list.append(hex_str_to_bytes(transaction["to_address"])) - - contracts = get_contracts_by_addresses(address_list=bytea_address_list, columns=["address"]) - contract_list = set(map(lambda x: x.address, contracts)) - - for transaction_json in transaction_list: - if transaction_json["to_address"] in contract_list: - transaction_json["to_address_is_contract"] = True - if transaction_json["from_address"] in contract_list: - transaction_json["from_address_is_contract"] = True - - -def fill_address_display_to_transactions(transaction_list: list[dict], bytea_address_list: list[bytes] = None): - if not bytea_address_list: - bytea_address_list = [] - for transaction in transaction_list: - bytea_address_list.append(hex_str_to_bytes(transaction["from_address"])) - bytea_address_list.append(hex_str_to_bytes(transaction["to_address"])) - - address_map = get_address_display_mapping(bytea_address_list) - - for transaction_json in transaction_list: - if transaction_json["from_address"] in address_map: - transaction_json["from_address_display_name"] = address_map[transaction_json["from_address"]] - else: - transaction_json["from_address_display_name"] = transaction_json["from_address"] - - if transaction_json["to_address"] in address_map: - transaction_json["to_address_display_name"] = address_map[transaction_json["to_address"]] - else: - transaction_json["to_address_display_name"] = transaction_json["to_address"] - - -def process_token_transfer(token_transfers, token_type): - token_transfer_list = [] - for token_transfer in token_transfers: - token_transfer_json = format_to_dict(token_transfer) - token_transfer_json["type"] = token_type - token_transfer_json["token_symbol"] = token_transfer.symbol or "UNKNOWN" - token_transfer_json["token_name"] = token_transfer.name or "Unknown Token" - - if token_type == "tokentxns": - token_transfer_json["value"] = ( - "{0:.18f}".format(token_transfer.value / 10 ** (token_transfer.decimals or 18)).rstrip("0").rstrip(".") - ) - 
token_transfer_json["token_logo_url"] = token_transfer.icon_url or None - else: - token_transfer_json["token_id"] = "{:f}".format(token_transfer.token_id) - token_transfer_json["token_logo_url"] = None - if token_type == "tokentxns-nft1155": - token_transfer_json["value"] = "{:f}".format(token_transfer.value) - - token_transfer_list.append(token_transfer_json) - return token_transfer_list diff --git a/hemera/api/app/utils/format_utils.py b/hemera/api/app/utils/format_utils.py deleted file mode 100644 index a0dff8223..000000000 --- a/hemera/api/app/utils/format_utils.py +++ /dev/null @@ -1,83 +0,0 @@ -import copy - - -def format_transaction(GAS_FEE_TOKEN_PRICE, transaction: dict): - transaction_json = copy.copy(transaction) - transaction_json["gas_fee_token_price"] = "{0:.2f}".format(GAS_FEE_TOKEN_PRICE) - - transaction_json["value"] = format_coin_value(int(transaction["value"])) - transaction_json["value_dollar"] = "{0:.2f}".format(transaction["value"] * GAS_FEE_TOKEN_PRICE / 10**18) - - gas_price = transaction["gas_price"] or 0 - transaction_json["gas_price_gwei"] = "{0:.6f}".format(gas_price / 10**9).rstrip("0").rstrip(".") - transaction_json["gas_price"] = "{0:.15f}".format(gas_price / 10**18).rstrip("0").rstrip(".") - - transaction_fee = gas_price * transaction["receipt_gas_used"] - total_transaction_fee = gas_price * transaction["receipt_gas_used"] - - if "receipt_l1_fee" in transaction_json and transaction_json["receipt_l1_fee"]: - transaction_json["receipt_l1_fee"] = ( - "{0:.15f}".format(transaction["receipt_l1_fee"] or 0 / 10**18).rstrip("0").rstrip(".") - ) - transaction_json["receipt_l1_gas_price"] = ( - "{0:.15f}".format(transaction["receipt_l1_gas_price"] or 0 / 10**18).rstrip("0").rstrip(".") - ) - transaction_json["receipt_l1_gas_price_gwei"] = ( - "{0:.6f}".format(transaction["receipt_l1_gas_price"] or 0 / 10**9).rstrip("0").rstrip(".") - ) - - total_transaction_fee = transaction_fee + transaction["receipt_l1_fee"] - 
transaction_json["transaction_fee"] = "{0:.15f}".format(transaction_fee / 10**18).rstrip("0").rstrip(".") - transaction_json["transaction_fee_dollar"] = "{0:.2f}".format( - gas_price * GAS_FEE_TOKEN_PRICE * transaction["receipt_gas_used"] / 10**18 - ) - - transaction_json["total_transaction_fee"] = ( - "{0:.15f}".format(total_transaction_fee / 10**18).rstrip("0").rstrip(".") - ) - transaction_json["total_transaction_fee_dollar"] = "{0:.2f}".format( - total_transaction_fee * GAS_FEE_TOKEN_PRICE / 10**18 - ) - return transaction_json - - -def format_dollar_value(value: float) -> str: - """ """ - if value > 1: - return "{0:.2f}".format(value) - return "{0:.6}".format(value) - - -def format_coin_value(value: int, decimal: int = 18) -> str: - """ - Formats a given integer value into a string that represents a token value. - Parameters: - value (int): The value to be formatted - - Returns: - str: The formatted token value as a string. - """ - if value < 1000: - return str(value) - else: - return "{0:.15f}".format(value / 10**18).rstrip("0").rstrip(".") - - -def format_coin_value_with_unit(value: int, native_token: str) -> str: - """ - Formats a given integer value into a string that represents a token value with the appropriate unit. - For values below 1000, it returns the value in WEI. - For higher values, it converts the value to a floating-point representation in the native token unit, - stripping unnecessary zeros. - - Parameters: - value (int): The value to be formatted, typically representing a token amount in WEI. - native_token (str): - - Returns: - str: The formatted token value as a string with the appropriate unit. 
- """ - if value < 1000: - return str(value) + " WEI" - else: - return "{0:.15f}".format(value / 10**18).rstrip("0").rstrip(".") + " " + native_token diff --git a/hemera/api/app/utils/parse_utils.py b/hemera/api/app/utils/parse_utils.py deleted file mode 100644 index 2ce4987d3..000000000 --- a/hemera/api/app/utils/parse_utils.py +++ /dev/null @@ -1,285 +0,0 @@ -import json -import re -from datetime import datetime - -from flask import current_app -from web3 import Web3 - -from hemera.api.app.contract.contract_verify import get_abis_for_logs, get_names_from_method_or_topic_list -from hemera.api.app.db_service.contracts import get_contracts_by_addresses -from hemera.api.app.db_service.tokens import get_token_by_address -from hemera.api.app.utils.fill_info import fill_address_display_to_logs, fill_address_display_to_transactions -from hemera.api.app.utils.format_utils import format_transaction -from hemera.api.app.utils.token_utils import get_token_price -from hemera.common.models.transactions import Transactions -from hemera.common.utils.abi_code_utils import decode_log_data -from hemera.common.utils.config import get_config -from hemera.common.utils.format_utils import bytes_to_hex_str, format_to_dict, row_to_dict -from hemera.common.utils.web3_utils import chain_id_name_mapping - -app_config = get_config() - -SUPPORT_BRIDGES = { - "0x99c9fc46f92e8a1c0dec1b1747d010903e884be1": { - "bridge_name": "Optimism Bridge", - "bridge_logo": "https://storage.googleapis.com/socialscan-public-asset/bridge/optimism.png", - }, - "0x3154cf16ccdb4c6d922629664174b904d80f2c35": { - "bridge_name": "Base Bridge", - "bridge_logo": "https://www.base.org/_next/static/media/logoBlack.4dc25558.svg", - }, - "0x72ce9c846789fdb6fc1f34ac4ad25dd9ef7031ef": { - "bridge_name": "Arbitrum One: L1 Gateway Router", - "bridge_logo": "https://cryptologos.cc/logos/arbitrum-arb-logo.svg?v=035", - }, - "0x4dbd4fc535ac27206064b68ffcf827b0a60bab3f": { - "bridge_name": "Arbitrum: Delayed Inbox", - 
"bridge_logo": "https://cryptologos.cc/logos/arbitrum-arb-logo.svg?v=035", - }, - "0x051f1d88f0af5763fb888ec4378b4d8b29ea3319": { - "bridge_name": "Linea: ERC20 Bridge", - "bridge_logo": "https://images.seeklogo.com/logo-png/52/1/linea-logo-png_seeklogo-527155.png", - }, - "0x504a330327a089d8364c4ab3811ee26976d388ce": { - "bridge_name": "Linea: USDC Bridge", - "bridge_logo": "https://images.seeklogo.com/logo-png/52/1/linea-logo-png_seeklogo-527155.png", - }, - "0xd19d4b5d358258f05d7b411e21a1460d11b0876f": { - "bridge_name": "Linea: L1 Message Service", - "bridge_logo": "https://images.seeklogo.com/logo-png/52/1/linea-logo-png_seeklogo-527155.png", - }, -} - - -def parse_deposit_assets(assets): - asset_list = [] - for asset in assets: - asset_dict = row_to_dict(asset) - - token_info = get_token_by_address( - asset_dict["token_address"], ["name", "symbol", "decimals", "icon_url", "token_type"] - ) - decimals = int(token_info.decimals) if token_info else 18 - asset_list.append( - { - "chain": chain_id_name_mapping[asset_dict["chain_id"]], - "bridge_contract_address": asset_dict["contract_address"], - "bridge_name": SUPPORT_BRIDGES[asset_dict["contract_address"]]["bridge_name"], - "bridge_logo": SUPPORT_BRIDGES[asset_dict["contract_address"]]["bridge_logo"], - "token": asset_dict["token_address"], - "token_name": token_info.name if token_info else None, - "token_symbol": token_info.symbol if token_info else None, - "token_icon_url": token_info.icon_url if token_info else None, - "token_type": token_info.token_type if token_info else None, - "amount": "{0:.18f}".format(asset_dict["value"] / 10**decimals).rstrip("0").rstrip("."), - } - ) - - return asset_list - - -def parse_deposit_transactions(transactions): - transaction_list = [] - for transaction in transactions: - tx_dict = row_to_dict(transaction) - tx_dict["chain_name"] = chain_id_name_mapping[tx_dict["chain_id"]] - - token_info = get_token_by_address( - tx_dict["token_address"], ["name", "symbol", "decimals", 
"icon_url", "token_type"] - ) - decimals = int(token_info.decimals) if token_info else 18 - tx_dict["token_name"] = token_info.name if token_info else None - tx_dict["token_symbol"] = token_info.symbol if token_info else None - tx_dict["token_icon_url"] = token_info.icon_url if token_info else None - tx_dict["token_type"] = token_info.token_type if token_info else None - - tx_dict["value"] = "{0:.18f}".format(tx_dict["value"] / 10**decimals).rstrip("0").rstrip(".") - - transaction_list.append(tx_dict) - return transaction_list - - -def parse_transactions(transactions: list[Transactions]): - transaction_list = [] - if len(transactions) <= 0: - return transaction_list - - GAS_FEE_TOKEN_PRICE = get_token_price(app_config.token_configuration.gas_fee_token, transactions[0].block_timestamp) - - to_address_list = [] - bytea_address_list = [] - for transaction in transactions: - to_address_list.append(transaction.to_address) - bytea_address_list.append(transaction.from_address) - bytea_address_list.append(transaction.to_address) - - transaction_json = format_to_dict(transaction) - transaction_json["method_id"] = "0x" + transaction_json["method_id"] - transaction_json["method"] = transaction_json["method_id"] - transaction_json["is_contract"] = False - transaction_json["contract_name"] = None - - if not transaction_json["to_address"]: - transaction_json["to_address"] = transaction_json["receipt_contract_address"] - - transaction_list.append(format_transaction(float(GAS_FEE_TOKEN_PRICE), transaction_json)) - - # Doing this early so we don't need to query contracts twice - fill_address_display_to_transactions(transaction_list, bytea_address_list) - - # Find contract - contracts = get_contracts_by_addresses(address_list=to_address_list, columns=["address"]) - contract_list = set(map(lambda x: bytes_to_hex_str(x.address), contracts)) - - method_list = [] - for transaction_json in transaction_list: - if transaction_json["receipt_contract_address"] is not None: - 
transaction_json["method"] = "Contract Creation" - elif transaction_json["method"] == "0x64617461": - decode_input = Web3.to_text(hexstr=transaction_json["input"]) - if "data:," in decode_input: - try: - inscription = json.loads(decode_input.split("data:,")[1]) - if inscription: - transaction_json["method"] = "Inscription: " + inscription["op"] - except: - pass - elif transaction_json["to_address"] in contract_list: - transaction_json["is_contract"] = True - method_list.append(transaction_json["method"]) - else: - transaction_json["method"] = "Transfer" - - # match function and function name - contract_function_abis = get_names_from_method_or_topic_list(method_list) - - for transaction_json in transaction_list: - for function_abi in contract_function_abis: - if transaction_json["method"] == function_abi.get("signed_prefix"): - transaction_json["method"] = " ".join( - re.sub( - "([A-Z][a-z]+)", - r" \1", - re.sub("([A-Z]+)", r" \1", function_abi.get("function_name")), - ).split() - ).title() - - return transaction_list - - -def parse_log_with_transaction_input_list(log_with_transaction_input_list): - log_list = [] - contract_topic_list = [] - transaction_method_list = [] - count_non_none = lambda x: 0 if x is None else 1 - for log in log_with_transaction_input_list: - - # values as dict format - log_json = format_to_dict(log.Logs) # log_with_transaction_input[0] - indexed_true_count = sum( - count_non_none(topic) for topic in [log_json["topic1"], log_json["topic2"], log_json["topic3"]] - ) - contract_topic_list.append((log_json["address"], log_json["topic0"], indexed_true_count)) - log_input = bytes_to_hex_str(log.input) - - if log_input and len(log_input) >= 10: - transaction_method = log_input[0:10] - transaction_method_list.append(transaction_method) - log_json["transaction_method_id"] = transaction_method - log_list.append(log_json) - - # Get method list by transaction_method_list - method_list = get_names_from_method_or_topic_list(transaction_method_list) - 
method_map = {method.get("signed_prefix"): method for method in method_list} - - address_sign_contract_abi_dict = get_abis_for_logs(contract_topic_list) - for log_json in log_list: - # Continue loop if 'topic0' is missing or has a falsy/empty value - if not log_json.get("topic0"): - continue - # Set method id - topic0_value = log_json["topic0"] - log_json["method_id"] = topic0_value[0:10] - - # Set function method - if "transaction_method_id" in log_json and log_json["transaction_method_id"] in method_map: - log_json["transaction_method"] = method_map[log_json["transaction_method_id"]].get("function_name") - log_json["transaction_function_unsigned"] = method_map[log_json["transaction_method_id"]].get( - "function_unsigned" - ) - - event_abi = address_sign_contract_abi_dict.get((log_json["address"], topic0_value)) - if not event_abi: - continue - try: - event_abi_json = json.loads(event_abi.get("function_abi")) - # Get full data types - index_data_types = [] - data_types = [] - - # Get full data string - index_data_str = "" - data_str = log_json["data"][2:] - - for param in event_abi_json["inputs"]: - if param["indexed"]: - index_data_types.append(param["type"]) - index_data_str += log_json[f"topic{len(index_data_types)}"][2:] - else: - data_types.append(param["type"]) - decoded_index_data, endcoded_index_data = decode_log_data(index_data_types, index_data_str) - decoded_data, endcoded_data = decode_log_data(data_types, data_str) - - index_input_data = [] - input_data = [] - full_function_name = "" - for index in range(len(event_abi_json["inputs"])): - param = event_abi_json["inputs"][index] - if param["indexed"]: - index_input_data.append( - { - "indexed": param["indexed"], - "name": param["name"], - "data_type": param["type"], - "hex_data": decoded_index_data[len(index_input_data)], - "dec_data": endcoded_index_data[len(index_input_data)], - } - ) - else: - input_data.append( - { - "indexed": param["indexed"], - "name": param["name"], - "data_type": param["type"], 
- "hex_data": decoded_data[len(input_data)], - "dec_data": endcoded_data[len(input_data)], - } - ) - if param["indexed"]: - full_function_name += f"index_topic_{index + 1} {param['type']} {param['name']}, " - else: - full_function_name += f"{param['type']} {param['name']}, " - function_name = event_abi.get("function_name") - full_function_name = f"{function_name}({full_function_name[:-2]})" - log_json["input_data"] = index_input_data + input_data - log_json["function_name"] = function_name - log_json["function_unsigned"] = event_abi.get("function_unsigned") - log_json["full_function_name"] = full_function_name - except Exception as e: - current_app.logger.info(e) - - fill_address_display_to_logs(log_list) - return log_list - - -def day_parse_int_to_str(day): - day = str(day) - date_obj = datetime.strptime(day, "%Y%m%d") - formatted_date = date_obj.strftime("%Y-%m-%d") - return formatted_date - - -def day_parse_str_to_int(day): - date_obj = datetime.strptime(day, "%Y-%m-%d") - formatted_date = date_obj.strftime("%Y%m%d") - return formatted_date diff --git a/hemera/api/app/utils/token_utils.py b/hemera/api/app/utils/token_utils.py deleted file mode 100644 index f29d0c75e..000000000 --- a/hemera/api/app/utils/token_utils.py +++ /dev/null @@ -1,45 +0,0 @@ -from datetime import datetime -from decimal import Decimal -from typing import List - -from hemera.api.app.cache import cache -from hemera.common.models import db -from hemera.common.models.token_hourly_price import CoinPrices, TokenHourlyPrices -from hemera.common.models.token_prices import TokenPrices -from hemera.common.utils.format_utils import as_dict - - -@cache.memoize(300) -def get_token_price(symbol, date=None) -> Decimal: - if date: - token_price = ( - db.session.query(TokenHourlyPrices) - .filter( - TokenHourlyPrices.symbol == symbol, - TokenHourlyPrices.timestamp <= date, - ) - .order_by(TokenHourlyPrices.timestamp.desc()) - .first() - ) - else: - token_price = ( - db.session.query(TokenPrices) - 
.filter(TokenPrices.symbol == symbol) - .order_by(TokenPrices.timestamp.desc()) - .first() - ) - if token_price: - return token_price.price - return Decimal(0.0) - - -@cache.memoize(300) -def get_coin_prices(date: List[datetime]): - coin_prices = db.session.query(CoinPrices).filter(CoinPrices.block_date.in_(date)).all() - return [as_dict(coin_price) for coin_price in coin_prices] - - -@cache.memoize(300) -def get_latest_coin_prices(): - res = db.session.query(CoinPrices).order_by(CoinPrices.block_date.desc()).first() - return float(res.price) if res.price else 0.0 diff --git a/hemera/api/app/utils/utils.py b/hemera/api/app/utils/utils.py deleted file mode 100644 index 6619e2508..000000000 --- a/hemera/api/app/utils/utils.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/python3 -# -*- coding: utf-8 -*- -from datetime import datetime, timedelta - -from hemera.common.utils.config import get_config -from hemera.common.utils.db_utils import get_total_row_count - -app_config = get_config() - - -def get_count_by_address(table, chain, wallet_address=None): - if not wallet_address: - return get_total_row_count(table) - - # Try to get count from redis - # get_count(f"{chain}_{table}_{wallet_address}") - - -def solve_nested_components(json_object, sb): - string = json_object.get("type") - - if json_object.get("components"): - components = json_object.get("components") - sb.append("(") - - for j in range(len(components)): - json_object1 = components[j] - if json_object1.get("components"): - solve_nested_components(json_object1, sb) - else: - inner_type = json_object1.get("type") - sb.append(inner_type) - - if j < len(components) - 1: - sb.append(",") - - sb.append(")") - while string.endswith("[]"): - sb.append("[]") - string = string[:-2] - else: - sb.append(string) - - -def is_l1_block_finalized(block_number, timestamp): - return timestamp < datetime.utcnow() - timedelta(minutes=15) - - -def is_l2_challenge_period_pass(block_number, timestamp): - return timestamp < 
datetime.utcnow() - timedelta(days=7) - timedelta(minutes=15) diff --git a/hemera/api/tests/app/test_cyber_mainnet_explorer.py b/hemera/api/tests/app/test_cyber_mainnet_explorer.py deleted file mode 100644 index eb738ff42..000000000 --- a/hemera/api/tests/app/test_cyber_mainnet_explorer.py +++ /dev/null @@ -1,816 +0,0 @@ -import pytest - - -@pytest.mark.explorer_api -def test_stats(test_client): - response = test_client.get("/v1/explorer/stats") - response_json = response.json - - assert response.status_code == 200 - assert response_json["total_transactions"] > 0 - assert "transaction_tps" in response_json - assert "latest_batch" in response_json - assert "latest_block" in response_json - assert "avg_block_time" in response_json - assert "eth_price" in response_json - assert "eth_price_btc" in response_json - assert "eth_price_diff" in response_json - assert "native_token_price" in response_json - assert "native_token_price_eth" in response_json - assert "native_token_price_diff" in response_json - assert "dashboard_token_price_eth" in response_json - assert "dashboard_token_price" in response_json - assert "dashboard_token_price_diff" in response_json - assert "gas_fee" in response_json - - -@pytest.mark.explorer_api -def test_transactions_per_day(test_client): - response = test_client.get("/v1/explorer/charts/transactions_per_day") - response_json = response.json - - assert response.status_code == 200 - assert "title" in response_json - assert response_json["title"] == "Daily Transactions Chart" - assert "data" in response_json - assert isinstance(response_json["data"], list) - assert all("value" in item and "count" in item for item in response_json["data"]) - - -@pytest.mark.explorer_api -def test_explorer_search(test_client): - q_list = [ - "131", - "0x319b69888b0d11cec22caa5034e25fffbdc88421", - "0x9e4ea822b615f8d7f98098a9c7e3950e6acaad2f", - "0x2D11ae7a83cc5C31093e9F8918E6A905222f536C", - "0xb3bfa1476895da112550aeb6bb494a25f0fb5302a701fe94433d538211f25619", - 
"USDT", - "0x551f5b690409b9e0482589b1a5b3d32237972f44af8fdb8b6f334c036a943770", - "godshan.eth", - ] - - for q in q_list: - response = test_client.get(f"/v1/explorer/search?q={q}") - response_json = response.json - - assert response.status_code == 200 - assert isinstance(response_json, list) - assert all(isinstance(item, dict) and "type" in item for item in response_json) - - -@pytest.mark.explorer_api -def test_internal_transactions_page1_size10(test_client): - response = test_client.get("/v1/explorer/internal_transactions?page=1&size=10") - response_json = response.json - - assert response.status_code == 200 - assert isinstance(response_json, dict) - assert "data" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "page" in response_json - assert "size" in response_json - - assert isinstance(response_json["data"], list) - assert len(response_json["data"]) <= 10 - for transaction in response_json["data"]: - assert isinstance(transaction, dict) - assert "from_address" in transaction - assert "to_address" in transaction - assert "value" in transaction - assert "from_address_is_contract" in transaction - assert "to_address_is_contract" in transaction - - -@pytest.mark.explorer_api -def test_internal_transactions_page_size_over_10000(test_client): - response = test_client.get("/v1/explorer/internal_transactions?page=25&size=500") - assert response.status_code == 400 - - -@pytest.mark.explorer_api -def test_internal_transactions_page_size_either_0(test_client): - response = test_client.get("/v1/explorer/internal_transactions?page=0&size=20") - assert response.status_code == 400 - - -@pytest.mark.explorer_api -def test_transactions(test_client): - response = test_client.get("/v1/explorer/transactions") - response_json = response.json - assert response.status_code == 200 - - assert response_json["total"] > 0 - - -@pytest.mark.explorer_api -def test_transactions_with_block_num(test_client): - response = 
test_client.get("/v1/explorer/transactions?page=10&size=10&block=123") - response_json = response.json - - assert response.status_code == 200 - assert isinstance(response_json, dict) - assert "data" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "page" in response_json - assert "size" in response_json - - assert isinstance(response_json["data"], list) - assert len(response_json["data"]) <= 10 - for transaction in response_json["data"]: - assert isinstance(transaction, dict) - assert "hash" in transaction - assert "block_number" in transaction - assert "from_address" in transaction - assert "to_address" in transaction - assert "value" in transaction - assert "block_timestamp" in transaction - - -@pytest.mark.explorer_api -def test_transactions_with_block_hash(test_client): - response = test_client.get( - "/v1/explorer/transactions?page=10&size=10&block=0x41A44E8B108A9EB075FC9297C32D6E0CC6A960DCA7CE8216563D723118E7A953" - ) - response_json = response.json - - assert response.status_code == 200 - assert isinstance(response_json, dict) - assert "data" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "page" in response_json - assert "size" in response_json - - assert isinstance(response_json["data"], list) - assert len(response_json["data"]) <= 10 - for transaction in response_json["data"]: - assert isinstance(transaction, dict) - assert "hash" in transaction - assert "block_number" in transaction - assert "from_address" in transaction - assert "to_address" in transaction - assert "value" in transaction - assert "block_timestamp" in transaction - - -@pytest.mark.explorer_api -def test_transactions_with_address(test_client): - response = test_client.get( - "/v1/explorer/transactions?page=10&size=10&address=0xDEADDEADDEADDEADDEADDEADDEADDEADDEAD0001" - ) - response_json = response.json - - assert response.status_code == 200 - assert isinstance(response_json, dict) - 
assert "data" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "page" in response_json - assert "size" in response_json - - assert isinstance(response_json["data"], list) - assert len(response_json["data"]) <= 10 - for transaction in response_json["data"]: - assert isinstance(transaction, dict) - assert "hash" in transaction - assert "block_number" in transaction - assert "from_address" in transaction - assert "to_address" in transaction - assert "value" in transaction - assert "block_timestamp" in transaction - - -@pytest.mark.explorer_api -def test_transactions_with_date(test_client): - response = test_client.get("/v1/explorer/transactions?page=10&size=10&date=20240527") - response_json = response.json - - assert response.status_code == 200 - assert isinstance(response_json, dict) - assert "data" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "page" in response_json - assert "size" in response_json - - assert isinstance(response_json["data"], list) - assert len(response_json["data"]) <= 10 - for transaction in response_json["data"]: - assert isinstance(transaction, dict) - assert "hash" in transaction - assert "block_number" in transaction - assert "from_address" in transaction - assert "to_address" in transaction - assert "value" in transaction - assert "block_timestamp" in transaction - - -@pytest.mark.explorer_api -def test_explorer_transaction_detail_no_trace(test_client): - hash = "0xD2F9DF901DD1BC79D9854D8B245597F82C0CD144E4898C3945096E197838FC83" - - response = test_client.get(f"/v1/explorer/transaction/{hash}") - response_json = response.json - - assert response.status_code == 200 - - assert "hash" in response_json - assert "block_number" in response_json - assert "from_address" in response_json - assert "to_address" in response_json - - -@pytest.mark.explorer_api -def test_explorer_transaction_detail_with_trace(test_client): - hash = 
"0xABDA79232D8787FFD190CEC2D2EA3894AD9C899D1D1AAA2937A369717A0D2CFF" - - response = test_client.get(f"/v1/explorer/transaction/{hash}") - print(response.text) - response_json = response.json - - assert response.status_code == 200 - - assert "hash" in response_json - assert "block_number" in response_json - assert "from_address" in response_json - assert "to_address" in response_json - - -@pytest.mark.explorer_api -def test_explorer_transaction_logs(test_client): - hash = "0x42F6C9379551E09E7CCDA661DD0A5C8208A4767803EDA1F8240E47B6B44E48F0" - - response = test_client.get(f"/v1/explorer/transaction/{hash}/logs") - response_json = response.json - - assert response.status_code == 200 - - assert "total" in response_json - assert "data" in response_json - - assert response_json["total"] >= 0 - - -@pytest.mark.explorer_api -def test_explorer_transaction_token_transfers(test_client): - hash = "0xDD279FF5C07C5AC723AED96BC34C797A79044A2D6D3539758F975BF499EBC530" - - response = test_client.get(f"/v1/explorer/transaction/{hash}/token_transfers") - response_json = response.json - - assert response.status_code == 200 - - assert "total" in response_json - assert "data" in response_json - - assert response_json["total"] >= 0 - - -@pytest.mark.explorer_api -def test_explorer_transaction_internal_transactions(test_client): - hash = "0x3C3EA0E4082D4C1DD98AD9216AF9283A91173B1C5086BBB7CDF790B74CD71D2F" - - response = test_client.get(f"/v1/explorer/transaction/{hash}/internal_transactions") - response_json = response.json - - assert response.status_code == 200 - assert "total" in response_json - assert "data" in response_json - assert response_json["total"] >= 0 - - -@pytest.mark.explorer_api -def test_explorer_transaction_traces(test_client): - hash = "0x3C3EA0E4082D4C1DD98AD9216AF9283A91173B1C5086BBB7CDF790B74CD71D2F" - - response = test_client.get(f"/v1/explorer/transaction/{hash}/traces") - response_json = response.json - - assert response.status_code == 200 - assert "data" in 
response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_explorer_tokens_erc20(test_client): - response = test_client.get("/v1/explorer/tokens?type=erc20&is_verified=False") - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "data" in response_json - assert response_json["data"] is not None - - -@pytest.mark.explorer_api -def test_explorer_tokens_erc721(test_client): - response = test_client.get("/v1/explorer/tokens?type=erc721&is_verified=False") - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "data" in response_json - assert response_json["data"] is not None - - -@pytest.mark.explorer_api -def test_explorer_tokens_erc1155(test_client): - response = test_client.get("/v1/explorer/tokens?type=erc1155&is_verified=False") - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "data" in response_json - assert response_json["data"] is not None - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers(test_client): - response = test_client.get("/v1/explorer/token_transfers?type=tokentxns") - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers_token_erc20(test_client): - response = test_client.get( - "/v1/explorer/token_transfers?type=tokentxns&token_address=0x6F6238C8EAEA56F54DF418823585D61FDD7DE5DA" - ) - response_json = response.json - - assert response.status_code == 
200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] is not None - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers_token_erc721(test_client): - response = test_client.get( - "/v1/explorer/token_transfers?type=tokentxns-nft&token_address=0x2473E8D725F7B3ECA344C272F110948D63280F96" - ) - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers_token_erc1155(test_client): - response = test_client.get( - "/v1/explorer/token_transfers?type=tokentxns-nft1155&token_address=0x2E421EB05FFA719C42C280EC0D52B38BB9E7923C" - ) - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers_address_erc20(test_client): - response = test_client.get( - "/v1/explorer/token_transfers?type=tokentxns&address=0xC5A076CAD94176C2996B32D8466BE1CE757FAA27" - ) - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers_address_erc721(test_client): - response = test_client.get( - "/v1/explorer/token_transfers?type=tokentxns-nft&address=0x9CDCBF212CCF4F11BCBC25CCDE18FFCE886F0CA6" - ) - 
response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_explorer_tokens_transfers_address_erc1155(test_client): - response = test_client.get( - "/v1/explorer/token_transfers?type=tokentxns-nft1155&address=0x9DAB1402AFC2511FCE1C81695DEABD0D79628EBB" - ) - response_json = response.json - - assert response.status_code == 200 - assert "page" in response_json - assert "size" in response_json - assert "total" in response_json - assert "max_display" in response_json - assert "data" in response_json - assert response_json["data"] - - -@pytest.mark.explorer_api -def test_blocks(test_client): - response = test_client.get("/v1/explorer/blocks") - response_json = response.json - assert response_json["total"] > 0 - assert response.status_code == 200 - - -@pytest.mark.explorer_api -def test_explorer_block_detail_with_number(test_client): - response = test_client.get("/v1/explorer/block/2024") - response_json = response.json - - assert response.status_code == 200 - - assert "internal_transaction_count" in response_json - assert "gas_fee_token_price" in response_json - assert "seconds_since_last_block" in response_json - assert "is_last_block" in response_json - - -@pytest.mark.explorer_api -def test_explorer_block_detail_with_hash(test_client): - response = test_client.get("/v1/explorer/block/0xF4CF31E1084D299C68D5CF9C87F080C90AA3E73B97CC79C296E92F088E1241C7") - response_json = response.json - - assert response.status_code == 200 - - assert "internal_transaction_count" in response_json - assert "gas_fee_token_price" in response_json - assert "seconds_since_last_block" in response_json - assert "is_last_block" in response_json - - -@pytest.mark.explorer_api -def test_explorer_address_profile(test_client): - address = 
"0x65FDF210CC6681A7CC911EC101A6F014F6798BE2" - response = test_client.get(f"/v1/explorer/address/{address}/profile") - response_json = response.json - - assert response.status_code == 200 - - assert "balance" in response_json - assert "native_token_price" in response_json - assert "balance_dollar" in response_json - assert "is_contract" in response_json - assert "is_token" in response_json - - if response_json["is_contract"]: - assert "contract_creator" in response_json - assert "transaction_hash" in response_json - assert "is_verified" in response_json - assert "is_proxy" in response_json - assert "implementation_contract" in response_json - assert "verified_implementation_contract" in response_json - - if response_json["is_token"]: - assert "token_type" in response_json - assert "token_name" in response_json - assert "token_symbol" in response_json - assert "token_logo_url" in response_json - - -@pytest.mark.explorer_api -def test_explorer_address_token_holdings_v1(test_client): - address = "0xA3DF90BF8E8183A74B537B27E3955BA7D8DE199C" - response = test_client.get(f"/v1/explorer/address/{address}/token_holdings") - response_json = response.json - - assert response.status_code == 200 - - assert "data" in response_json - assert "total" in response_json - - token_holder_list = response_json["data"] - total_count = response_json["total"] - - assert isinstance(token_holder_list, list) - assert isinstance(total_count, int) - - if token_holder_list: - sample_token = token_holder_list[0] - assert "token_address" in sample_token - assert "balance" in sample_token - assert "token_id" in sample_token - assert "token_name" in sample_token - assert "token_symbol" in sample_token - assert "token_logo_url" in sample_token - assert "type" in sample_token - - assert sample_token["type"] in [ - "tokentxns", - "tokentxns-nft", - "tokentxns-nft1155", - ] - - -@pytest.mark.explorer_api -def test_explorer_address_token_holdings_v2(test_client): - address = 
"0xA3DF90BF8E8183A74B537B27E3955BA7D8DE199C" - response = test_client.get(f"/v2/explorer/address/{address}/token_holdings") - response_json = response.json - - assert response.status_code == 200 - - assert "data" in response_json - assert "total" in response_json - - token_holder_list = response_json["data"] - total_count = response_json["total"] - - assert isinstance(token_holder_list, list) - assert isinstance(total_count, int) - - if token_holder_list: - sample_token = token_holder_list[0] - assert "token_address" in sample_token - assert "balance" in sample_token - assert "token_id" in sample_token - assert "token_name" in sample_token - assert "token_symbol" in sample_token - assert "token_logo_url" in sample_token - assert "type" in sample_token - - assert sample_token["type"] in [ - "tokentxns", - "tokentxns-nft", - "tokentxns-nft1155", - ] - - -@pytest.mark.explorer_api -def test_explorer_address_transactions(test_client): - address = "0xDEADDEADDEADDEADDEADDEADDEADDEADDEAD0001" - response = test_client.get(f"/v1/explorer/address/{address}/transactions") - response_json = response.json - - assert response.status_code == 200 - - assert "data" in response_json - assert "total" in response_json - - transaction_list = response_json["data"] - total_count = response_json["total"] - - assert isinstance(transaction_list, list) - assert isinstance(total_count, int) - - if transaction_list: - sample_transaction = transaction_list[0] - assert "hash" in sample_transaction - assert "from_address" in sample_transaction - assert "to_address" in sample_transaction - assert "value" in sample_transaction - assert "block_number" in sample_transaction - assert "block_timestamp" in sample_transaction - - -@pytest.mark.explorer_api -def test_explorer_address_token_transfers(test_client): - paras = [ - { - "address": "0xA3DF90BF8E8183A74B537B27E3955BA7D8DE199C", - "transfer_type": "tokentxns", - }, - { - "address": "0x9F4BF1FAFD578AE36A96C1E44ED8EC9DAFCF4593", - "transfer_type": 
"tokentxns-nft", - }, - { - "address": "0xEA792BF7B860C0D074DB81B0BE91F41B1B9C1641", - "transfer_type": "tokentxns-nft1155", - }, - ] - - for para in paras: - address = para["address"] - transfer_type = para["transfer_type"] - - response = test_client.get(f"/v1/explorer/address/{address}/token_transfers?type={transfer_type}") - response_json = response.json - - assert response.status_code == 200 - - assert "total" in response_json - assert "data" in response_json - assert "type" in response_json - - total_count = response_json["total"] - token_transfer_list = response_json["data"] - transfer_type_response = response_json["type"] - - assert isinstance(total_count, int) - assert isinstance(token_transfer_list, list) - assert isinstance(transfer_type_response, str) - - assert transfer_type_response == transfer_type - - if token_transfer_list: - sample_transfer = token_transfer_list[0] - assert "from_address" in sample_transfer - assert "to_address" in sample_transfer - assert "token_address" in sample_transfer - assert "token_name" in sample_transfer - assert "token_symbol" in sample_transfer - assert "token_logo_url" in sample_transfer - - -@pytest.mark.explorer_api -def test_explorer_address_internal_transactions(test_client): - address = "0x1A8D6D5ABD8948B647C51BB7B071B718FD90D6FF" - - response = test_client.get(f"/v1/explorer/address/{address}/internal_transactions") - response_json = response.json - - assert response.status_code == 200 - - assert "total" in response_json - assert "data" in response_json - - total_count = response_json["total"] - transaction_list = response_json["data"] - - assert isinstance(total_count, int) - assert isinstance(transaction_list, list) - - if transaction_list: - sample_transaction = transaction_list[0] - assert "from_address" in sample_transaction - assert "to_address" in sample_transaction - assert "value" in sample_transaction - assert "from_address_is_contract" in sample_transaction - assert "to_address_is_contract" in 
sample_transaction - assert "block_number" in sample_transaction - assert "transaction_index" in sample_transaction - assert "value" in sample_transaction - - assert isinstance(sample_transaction["from_address"], str) - assert isinstance(sample_transaction["to_address"], str) - assert isinstance(sample_transaction["value"], str) - assert isinstance(sample_transaction["from_address_is_contract"], bool) - assert isinstance(sample_transaction["to_address_is_contract"], bool) - assert isinstance(sample_transaction["block_number"], int) - assert isinstance(sample_transaction["transaction_index"], int) - - -@pytest.mark.explorer_api -def test_explorer_address_logs(test_client): - address = "0x19F4147568D76B8A68B3755589ECD09A6B97ACB7" - - response = test_client.get(f"/v1/explorer/address/{address}/logs") - response_json = response.json - - assert response.status_code == 200 - - assert "total" in response_json - assert "data" in response_json - - total_count = response_json["total"] - log_list = response_json["data"] - - assert isinstance(total_count, int) - assert isinstance(log_list, list) - - if log_list: - sample_log = log_list[0] - assert "transaction_hash" in sample_log - assert "block_number" in sample_log - assert "log_index" in sample_log - assert "address" in sample_log - assert "data" in sample_log - assert "topic0" in sample_log - - assert isinstance(sample_log["transaction_hash"], str) - assert isinstance(sample_log["block_number"], int) - assert isinstance(sample_log["log_index"], int) - assert isinstance(sample_log["address"], str) - assert isinstance(sample_log["data"], str) - assert isinstance(sample_log["topic0"], str) - - -@pytest.mark.explorer_api -def test_explorer_token_profile(test_client): - addresses = [ - "0x7A524C7E82874226F0B51AADE60A1BE4D430CF0F", - "0xEB5FB40B071C0D59449A3E12A09DBD0E23F4836E", - "0x2E421EB05FFA719C42C280EC0D52B38BB9E7923C", - ] - - for address in addresses: - response = test_client.get(f"/v1/explorer/token/{address}/profile") 
- response_json = response.json - - assert response.status_code == 200 - - assert "token_name" in response_json - assert "token_checksum_address" in response_json - assert "token_address" in response_json - assert "token_symbol" in response_json - assert "token_logo_url" in response_json - assert "token_urls" in response_json - assert "social_medias" in response_json - assert "token_description" in response_json - assert "total_supply" in response_json - assert "total_holders" in response_json - assert "total_transfers" in response_json - assert "type" in response_json - - -@pytest.mark.explorer_api -def test_explorer_token_transfers(test_client): - addresses = [ - "0x7A524C7E82874226F0B51AADE60A1BE4D430CF0F", - "0xEB5FB40B071C0D59449A3E12A09DBD0E23F4836E", - "0x2E421EB05FFA719C42C280EC0D52B38BB9E7923C", - ] - - for address in addresses: - response = test_client.get(f"/v1/explorer/token/{address}/token_transfers") - response_json = response.json - - assert response.status_code == 200 - - assert "total" in response_json - assert "data" in response_json - assert "type" in response_json - - assert isinstance(response_json["total"], int) - assert isinstance(response_json["data"], list) - assert isinstance(response_json["type"], str) - - -@pytest.mark.explorer_api -def test_explorer_token_top_holders_v2(test_client): - token_addresses = [ - "0x7A524C7E82874226F0B51AADE60A1BE4D430CF0F", - "0xEB5FB40B071C0D59449A3E12A09DBD0E23F4836E", - "0x2E421EB05FFA719C42C280EC0D52B38BB9E7923C", - ] - for token_address in token_addresses: - response = test_client.get(f"/v2/explorer/token/{token_address}/top_holders?page=1&size=10") - response_json = response.json - - assert response.status_code == 200 - - assert "data" in response_json - assert "total" in response_json - - assert isinstance(response_json["total"], int) - assert isinstance(response_json["data"], list) - - for holder in response_json["data"]: - assert "token_address" in holder - assert "wallet_address" in holder - assert 
"balance" in holder - assert isinstance(holder["token_address"], str) - assert isinstance(holder["wallet_address"], str) - assert isinstance(holder["balance"], str) - - -@pytest.mark.explorer_api -def test_charts(test_client): - response = test_client.get("/v1/explorer/charts/transactions_per_day") - assert response.status_code == 200 - response_json = response.json - assert "data" in response_json - # assert len(response_json["data"]) == 14 - - -if __name__ == "__main__": - pytest.main([__file__]) diff --git a/hemera/api/tests/conftest.py b/hemera/api/tests/conftest.py deleted file mode 100644 index ab580c758..000000000 --- a/hemera/api/tests/conftest.py +++ /dev/null @@ -1,44 +0,0 @@ -import pytest - -from hemera.api.app.config import * -from hemera.common.utils.config import set_config - - -@pytest.fixture(scope="module") -def test_client(): - app_config = AppConfig( - api_modules=[ - APIModule.EXPLORER, - ], - env="ut", - chain="test", - contract_service="", - db_read_sql_alchemy_database_config=DatabaseConfig( - host="localhost", - port=5432, - database="indexer_test", - username="postgres", - password="admin", - ), - db_write_sql_alchemy_database_config=DatabaseConfig( - host="localhost", - port=5432, - database="indexer_test", - username="postgres", - password="admin", - ), - db_common_sql_alchemy_database_config=DatabaseConfig( - host="localhost", - port=5432, - database="indexer_test", - username="postgres", - password="admin", - ), - rpc="https://story-network.rpc.caldera.xyz/http", - ) - set_config(app_config) - from hemera.api.app.main import app - - with app.test_client() as testing_client: - with app.app_context(): - yield testing_client diff --git a/hemera/api/__init__.py b/hemera/app/__init__.py similarity index 100% rename from hemera/api/__init__.py rename to hemera/app/__init__.py diff --git a/hemera/api/app/__init__.py b/hemera/app/api/__init__.py similarity index 100% rename from hemera/api/app/__init__.py rename to hemera/app/api/__init__.py 
diff --git a/hemera/app/api/deps.py b/hemera/app/api/deps.py new file mode 100644 index 000000000..92d69380c --- /dev/null +++ b/hemera/app/api/deps.py @@ -0,0 +1,62 @@ +from collections.abc import Generator +from typing import Annotated + +from fastapi import Depends +from sqlmodel import Session + +from hemera.app.core.db import Database + + +def get_read_db() -> Generator[Session, None, None]: + with Session(Database.get_read_engine()) as session: + yield session + + +def get_write_db() -> Generator[Session, None, None]: + with Session(Database.get_write_engine()) as session: + yield session + + +def get_common_db() -> Generator[Session, None, None]: + with Session(Database.get_common_engine()) as session: + yield session + + +ReadSessionDep = Annotated[Session, Depends(get_read_db)] +WriteSessionDep = Annotated[Session, Depends(get_write_db)] +CommonSessionDep = Annotated[Session, Depends(get_common_db)] +""" +reusable_oauth2 = OAuth2PasswordBearer( + tokenUrl=f"/login/access-token" +) + +TokenDep = Annotated[str, Depends(reusable_oauth2)] + +def get_current_user(session: ReadSessionDep, token: TokenDep) -> User: + try: + payload = jwt.decode( + token, settings.SECRET_KEY, algorithms=[]#zzsecurity.ALGORITHM] + ) + token_data = TokenPayload(**payload) + except (InvalidTokenError, ValidationError): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Could not validate credentials", + ) + user = session.get(User, token_data.sub) + if not user: + raise HTTPException(status_code=404, detail="User not found") + if not user.is_active: + raise HTTPException(status_code=400, detail="Inactive user") + return user + +CurrentUser = Annotated[User, Depends(get_current_user)] + + +def get_current_active_superuser(current_user: CurrentUser) -> User: + if not current_user.is_superuser: + raise HTTPException( + status_code=403, detail="The user doesn't have enough privileges" + ) + return current_user +""" diff --git a/hemera/api/app/db_service/__init__.py 
b/hemera/app/api/routes/__init__.py similarity index 100% rename from hemera/api/app/db_service/__init__.py rename to hemera/app/api/routes/__init__.py diff --git a/hemera/api/app/ens/__init__.py b/hemera/app/api/routes/aci/__init__.py similarity index 100% rename from hemera/api/app/ens/__init__.py rename to hemera/app/api/routes/aci/__init__.py diff --git a/hemera/app/api/routes/developer/__init__.py b/hemera/app/api/routes/developer/__init__.py new file mode 100644 index 000000000..acd7710ac --- /dev/null +++ b/hemera/app/api/routes/developer/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/1/31 22:27 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/api/app/token/__init__.py b/hemera/app/api/routes/developer/es_adapter/__init__.py similarity index 100% rename from hemera/api/app/token/__init__.py rename to hemera/app/api/routes/developer/es_adapter/__init__.py diff --git a/hemera/app/api/routes/developer/es_adapter/helper.py b/hemera/app/api/routes/developer/es_adapter/helper.py new file mode 100644 index 000000000..b9f4064d7 --- /dev/null +++ b/hemera/app/api/routes/developer/es_adapter/helper.py @@ -0,0 +1,1152 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/1 01:38 +# @Author ideal93 +# @File helper.py +# @Brief +import json +from datetime import datetime +from typing import List, Optional, Union + +from pydantic import BaseModel, Field +from sqlmodel import Session, and_, or_, select + +from hemera.common.enumeration.token_type import TokenType +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.logs import Logs +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.stats.daily_addresses_stats import DailyAddressesStats +from hemera.common.models.stats.daily_blocks_stats import DailyBlocksStats +from hemera.common.models.stats.daily_transactions_stats import DailyTransactionsStats +from 
from hemera.common.models.token.nft import NFTDetails
from hemera.common.models.token.token_balances import AddressTokenBalances, CurrentTokenBalances
from hemera.common.models.token.token_id_balances import AddressTokenIdBalances, CurrentTokenIdBalances
from hemera.common.models.token.token_transfers import ERC20TokenTransfers, ERC721TokenTransfers, ERC1155TokenTransfers
from hemera.common.models.token.tokens import Tokens
from hemera.common.models.trace.address_coin_balances import AddressCoinBalances
from hemera.common.models.trace.contracts import Contracts
from hemera.common.models.trace.traces import ContractInternalTransactions, Traces
from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes


# Get Ether Balance for a Single Address
def account_balance(session: Session, address, tag=None) -> Optional[int]:
    """Return the most recent native-coin balance recorded for *address*.

    ``tag`` is accepted for Etherscan API compatibility but ignored; the
    newest ``AddressCoinBalances`` row (highest block number) is used.
    Returns ``None`` when the address has no balance record.
    """
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    balance_record = (
        session.query(AddressCoinBalances)
        .where(AddressCoinBalances.address == address)
        .order_by(AddressCoinBalances.block_number.desc())
        .limit(1)
        .first()
    )
    return int(balance_record.balance) if balance_record else None


class AddressBalance(BaseModel):
    """One (address, latest balance) pair returned by account_balancemulti."""

    address: str
    balance: int


# Get Ether Balance for Multiple Addresses in a Single Call
def account_balancemulti(session: Session, addresses, tag=None) -> List[AddressBalance]:
    """Return the latest balance for each address that has a record.

    Addresses without any ``AddressCoinBalances`` row are silently omitted
    from the result. ``tag`` is accepted for API compatibility and ignored.
    """
    results = []
    for address in addresses:
        # Keep the query key (bytes) separate from the echo value (hex str).
        address_bytes = hex_str_to_bytes(address) if isinstance(address, str) else address
        balance_record = (
            session.query(AddressCoinBalances)
            .where(AddressCoinBalances.address == address_bytes)
            .order_by(AddressCoinBalances.block_number.desc())
            .limit(1)
            .first()
        )
        if balance_record:
            # BUG FIX: previously the bytes-converted address was passed into
            # the str-typed ``address`` field; echo a hex string instead.
            results.append(
                AddressBalance(address=bytes_to_hex_str(address_bytes), balance=int(balance_record.balance))
            )
    return results


# Get Historical Ether Balance for a Single Address By BlockNo
def account_balancehistory(session: Session, address, blockno) -> int:
    """Return the balance of *address* as of block *blockno* (0 if none)."""
    if isinstance(address, str):
        address = hex_str_to_bytes(address)

    balance_record = (
        session.query(AddressCoinBalances)
        .where(
            AddressCoinBalances.address == address,
            AddressCoinBalances.block_number <= blockno,
        )
        .order_by(AddressCoinBalances.block_number.desc())
        .limit(1)
        .first()
    )
    return int(balance_record.balance) if balance_record else 0


class TransactionData(BaseModel):
    """Etherscan-shaped row for a normal transaction (all values stringified)."""

    blockNumber: str
    timeStamp: str
    hash: str
    nonce: str
    blockHash: str
    fromAddress: str
    toAddress: str
    value: str
    gas: str
    gasPrice: str
    isError: str
    txreceipt_status: str
    input: str
    contractAddress: Optional[str]
    cumulativeGasUsed: str
    gasUsed: str
    confirmations: str  # TODO:
    methodId: str
    functionName: str  # TODO: methodId


# (P0)Get a list of 'Normal' Transactions By Address
# (P0)Get 'Normal Transactions' by Transaction Hash
# (P0)Get "Normal Transactions" by Block Range
def account_txlist(
    session: Session,
    txhash: Optional[str],
    address: Optional[str],
    start_block: int,
    end_block: int,
    page: int,
    offset: int,
    sort_order: str,
) -> List[TransactionData]:
    """Return normal transactions by hash, by address, or by block range.

    If *txhash* is given it wins over the block-range filter; *address*
    (when given) matches either side of the transfer. Results are paged
    (``page`` is 1-based, ``offset`` is the page size) and ordered by
    block number per *sort_order* ("asc" or anything else for desc).
    """
    if txhash:
        query = session.query(Transactions).filter_by(hash=hex_str_to_bytes(txhash))
    else:
        query = session.query(Transactions).where(
            and_(Transactions.block_number >= start_block, Transactions.block_number <= end_block)
        )

    if address:
        condition = or_(
            Transactions.from_address == hex_str_to_bytes(address),
            Transactions.to_address == hex_str_to_bytes(address),
        )
        query = query.where(condition)

    if sort_order == "asc":
        query = query.order_by(Transactions.block_number.asc())
    else:
        query = query.order_by(Transactions.block_number.desc())

    transactions = query.limit(offset).offset((page - 1) * offset).all()

    # NOTE(review): strftime("%s") is a glibc extension (epoch seconds) and is
    # not portable to all platforms — confirm deployment targets.
    # NOTE(review): to_address can be NULL for contract creations; confirm
    # bytes_to_hex_str(None) yields an acceptable value for the str field.
    return [
        TransactionData(
            blockNumber=str(tx.block_number),
            timeStamp=tx.block_timestamp.strftime("%s"),
            hash=bytes_to_hex_str(tx.hash),
            nonce=str(tx.nonce),
            blockHash=bytes_to_hex_str(tx.block_hash),
            transactionIndex=str(tx.transaction_index),
            fromAddress=bytes_to_hex_str(tx.from_address),
            toAddress=bytes_to_hex_str(tx.to_address),
            value=str(tx.value),
            gas=str(tx.gas),
            gasPrice=str(tx.gas_price),
            isError="0" if tx.receipt_status == 1 else "1",
            txreceipt_status=str(tx.receipt_status),
            input=bytes_to_hex_str(tx.input) or "",
            contractAddress=bytes_to_hex_str(tx.receipt_contract_address),
            cumulativeGasUsed=str(tx.receipt_cumulative_gas_used),
            gasUsed=str(tx.receipt_gas_used),
            confirmations="",  #
            methodId=bytes_to_hex_str(tx.input)[0:10] if tx.input else "",
            functionName="",  #
        )
        for tx in transactions
    ]


class TransactionInternalData(BaseModel):
    """Etherscan-shaped row for an internal (trace-level) transaction."""

    blockNumber: str
    timeStamp: str
    hash: str
    fromAddress: str
    toAddress: str
    value: str
    contractAddress: Optional[str]
    input: str
    type: str
    gas: str
    gasUsed: str
    traceId: str
    isError: str
    errCode: Optional[int]


# (P0)Get a list of 'Internal' Transactions by Address
# (P0)Get 'Internal Transactions' by Transaction Hash
# (P0)Get "Internal Transactions" by Block Range
def account_txlistinternal(
    session: Session,
    txhash: Optional[str],
    address: Optional[str],
    start_block: int,
    end_block: int,
    page: int,
    offset: int,
    sort_order: str,
) -> List[TransactionInternalData]:
    """Return internal transactions by hash, by address, or by block range.

    Filtering, paging and ordering semantics mirror :func:`account_txlist`.
    """
    if txhash:
        query = session.query(ContractInternalTransactions).filter_by(transaction_hash=hex_str_to_bytes(txhash))
    else:
        query = session.query(ContractInternalTransactions).where(
            and_(
                ContractInternalTransactions.block_number >= start_block,
                ContractInternalTransactions.block_number <= end_block,
            ),
        )

    if address:
        condition = or_(
            ContractInternalTransactions.from_address == hex_str_to_bytes(address),
            ContractInternalTransactions.to_address == hex_str_to_bytes(address),
        )
        query = query.where(condition)

    query = query.order_by(
        ContractInternalTransactions.block_number.asc()
        if sort_order == "asc"
        else ContractInternalTransactions.block_number.desc()
    )

    internal_transactions = query.limit(offset).offset((page - 1) * offset).all()

    return [
        TransactionInternalData(
            blockNumber=str(tx.block_number),
            timeStamp=tx.block_timestamp.strftime("%s"),
            hash=bytes_to_hex_str(tx.transaction_hash),
            fromAddress=bytes_to_hex_str(tx.from_address),
            toAddress=bytes_to_hex_str(tx.to_address),
            value=str(tx.value),
            contractAddress=bytes_to_hex_str(tx.to_address) if tx.trace_type in ["create", "create2"] else "",
            input=bytes_to_hex_str(tx.input) or "",  # TODO
            type=tx.trace_type,
            gas=str(tx.gas),
            gasUsed=str(tx.gas_used),
            traceId=tx.trace_id,
            # BUG FIX: was '"1" if tx.error == 0 else "0"', which flagged the
            # *success* value (error == 0) as an error and never flagged real
            # errors. Flag on the presence of an error, matching
            # account_txlist's convention.
            isError="1" if tx.error else "0",
            errCode=tx.error,
        )
        for tx in internal_transactions
    ]


# (P0)Get a list of 'ERC20 - Token Transfer Events' by Address
# (P0)Get a list of 'ERC721 - Token Transfer Events' by Address
# (P0)Get a list of 'ERC1155 - Token Transfer Events' by Address


class TokenTransferBase(BaseModel):
    """Shared fields for token-transfer rows; aliases follow Etherscan naming."""

    block_number: str = Field(alias="blockNumber")
    time_stamp: str = Field(alias="timeStamp")
    hash: str
    nonce: str
    block_hash: str = Field(alias="blockHash")
    from_address: str = Field(alias="from")
    contract_address: str = Field(alias="contractAddress")
    to: str
    token_name: str = Field(alias="tokenName")
    token_symbol: str = Field(alias="tokenSymbol")
    transaction_index: str = Field(alias="transactionIndex")
    gas: str
    gas_price: str = Field(alias="gasPrice")
    gas_used: str = Field(alias="gasUsed")
    cumulative_gas_used: str = Field(alias="cumulativeGasUsed")

    class Config:
        # Both spellings kept so the model works under pydantic v1 and v2.
        populate_by_name = True
        allow_population_by_field_name = True


class ERC20Transfer(TokenTransferBase):
    """ERC-20 transfer: adds amount and token decimals."""

    value: str
    token_decimal: str = Field(alias="tokenDecimal")


class ERC721Transfer(TokenTransferBase):
    """ERC-721 transfer: adds the NFT token id."""

    token_id: str = Field(alias="tokenID")


class ERC1155Transfer(TokenTransferBase):
    """ERC-1155 transfer: adds amount and token id."""

    token_value: str = Field(alias="tokenValue")
    token_id: str = Field(alias="tokenID")


def get_account_token_transfers(
    session: Session,
    contract_address: Optional[str] = None,
    address: Optional[str] = None,
    page: int = 1,
    offset: int = 10,
    sort_order: str = "desc",
    start_block: int = 0,
    end_block: int = 99999999,
    token_type: TokenType = TokenType.ERC20,
) -> List[Union[ERC20Transfer, ERC721Transfer, ERC1155Transfer]]:
    """Return token transfers filtered by token contract and/or participant.

    At least one of *contract_address* / *address* is required (otherwise an
    empty list is returned). Unknown *token_type* values fall back to ERC20.
    Each transfer row is joined with its transaction for gas/nonce fields and
    with the Tokens table for name/symbol/decimals.
    """
    # Input validation
    if address is None and contract_address is None:
        return []
    if token_type not in [TokenType.ERC20, TokenType.ERC721, TokenType.ERC1155]:
        token_type = TokenType.ERC20
    transfer_model = {
        TokenType.ERC20: ERC20TokenTransfers,
        TokenType.ERC721: ERC721TokenTransfers,
        TokenType.ERC1155: ERC1155TokenTransfers,
    }[token_type]
    response_model = {
        TokenType.ERC20: ERC20Transfer,
        TokenType.ERC721: ERC721Transfer,
        TokenType.ERC1155: ERC1155Transfer,
    }[token_type]

    # Build conditions
    conditions = []
    if contract_address:
        conditions.append(transfer_model.token_address == hex_str_to_bytes(contract_address))
    if address:
        conditions.append(
            or_(
                transfer_model.from_address == hex_str_to_bytes(address),
                transfer_model.to_address == hex_str_to_bytes(address),
            )
        )
    conditions.extend([transfer_model.block_number >= start_block, transfer_model.block_number <= end_block])

    query = (
        select(transfer_model, Transactions)
        .where(and_(*conditions))
        .join(Transactions, transfer_model.transaction_hash == Transactions.hash)
        .order_by(transfer_model.block_number.desc() if sort_order == "desc" else transfer_model.block_number.asc())
        .offset((page - 1) * offset)
        .limit(offset)
    )

    # Execute query
    transfers = session.exec(query).all()

    if not transfers:
        return []

    # Get tokens info (one batched lookup instead of a query per transfer)
    token_addresses = {transfer.token_address for transfer, _ in transfers}
    tokens = session.exec(select(Tokens).where(Tokens.address.in_(token_addresses))).all()
    token_dict = {token.address: token for token in tokens}

    # Format results
    result = []
    for transfer, tx in transfers:
        base_data = {
            "blockNumber": str(transfer.block_number),
            "timeStamp": transfer.block_timestamp.strftime("%s"),
            "hash": bytes_to_hex_str(transfer.transaction_hash) or "",
            "nonce": str(tx.nonce),
            "blockHash": bytes_to_hex_str(transfer.block_hash),
            "from": bytes_to_hex_str(transfer.from_address),
            "contractAddress": bytes_to_hex_str(transfer.token_address),
            "to": bytes_to_hex_str(transfer.to_address),
            "tokenName": token_dict[transfer.token_address].name,
            "tokenSymbol": token_dict[transfer.token_address].symbol,
            "transactionIndex": str(tx.transaction_index),
            "gas": str(tx.gas),
            "gasPrice": str(tx.gas_price),
            "gasUsed": str(tx.receipt_gas_used),
            "cumulativeGasUsed": str(tx.receipt_cumulative_gas_used),
        }

        # Add token type specific fields
        if token_type == TokenType.ERC20:
            base_data["value"] = str(transfer.value)
            base_data["tokenDecimal"] = str(token_dict[transfer.token_address].decimals)
        elif token_type == TokenType.ERC721:
            base_data["tokenID"] = str(transfer.token_id)
        elif token_type == TokenType.ERC1155:
            base_data["tokenValue"] = str(transfer.value)
            base_data["tokenID"] = str(transfer.token_id)

        result.append(response_model(**base_data))

    return result


# Check Contract Execution Status
class ContractExecutionStatus(BaseModel):
    """Execution outcome of a transaction's root trace."""

    isError: str
    errDescription: Optional[str]


def check_contract_execution_status(session: Session, txn_hash: str) -> Optional[ContractExecutionStatus]:
    """Return the root-trace status for *txn_hash*, or None if unknown.

    trace_address == "{}" selects the top-level (root) trace of the
    transaction.
    """
    transaction = (
        session.exec(
            select(Traces.status, Traces.error).where(
                Traces.transaction_hash == hex_str_to_bytes(txn_hash),
                Traces.trace_address == "{}",
            )
        )
    ).first()

    if transaction:
        return ContractExecutionStatus(
            isError="1" if transaction.status == 0 else "0",
            errDescription=transaction.error if transaction.status == 0 else "",
        )
    else:
        return None


class TransactionReceiptStatus(BaseModel):
    """Receipt status ("1" success / "0" failure) for a transaction."""

    status: str


# Check Transaction Receipt Status
def check_transaction_receipt_status(session: Session, txn_hash: str) -> Optional[TransactionReceiptStatus]:
    """Return the receipt status of *txn_hash*, or None if not found."""
    receipt_status = (
        session.exec(
            select(Transactions.receipt_status).where(
                Transactions.hash == hex_str_to_bytes(txn_hash),
            )
        )
    ).first()
    if receipt_status:
        return TransactionReceiptStatus(status=str(receipt_status))
    else:
        return None


# (P0)Get Event Logs by Address
# (P0)Get Event Logs by Topics
# (P0)Get Event Logs by Address filtered by Topics


class APILogResponse(BaseModel):
    """Etherscan-shaped event-log row."""

    transactionHash: str
    logIndex: str
    address: str
    data: str
    blockNumber: str
    timeStamp: str
    topics: List[str]


def get_event_logs(
    session: Session,
    topic0: Optional[str] = None,
    topic1: Optional[str] = None,
    topic2: Optional[str] = None,
    topic3: Optional[str] = None,
    topic0_1_opr: str = "and",
    topic1_2_opr: str = "and",
    topic2_3_opr: str = "and",
    topic0_2_opr: str = "and",
    topic1_3_opr: str = "and",
    topic0_3_opr: str = "and",
    address: Optional[str] = None,
    from_block: int = 0,
    to_block: int = 999999999,
    page: int = 1,
    offset: int = 10,
    sort_order: str = "desc",
) -> List[APILogResponse]:
    """Return event logs filtered by topics, emitting address and block range.

    The ``topicX_Y_opr`` parameters ("and"/"or") combine *adjacent provided*
    topic filters in order; only the operator for the actually-adjacent pair
    of supplied topics is consulted (Etherscan semantics).
    """
    conditions = []

    if topic0:
        conditions.append(("topic0", Logs.topic0 == hex_str_to_bytes(topic0)))
    if topic1:
        conditions.append(("topic1", Logs.topic1 == hex_str_to_bytes(topic1)))
    if topic2:
        conditions.append(("topic2", Logs.topic2 == hex_str_to_bytes(topic2)))
    if topic3:
        conditions.append(("topic3", Logs.topic3 == hex_str_to_bytes(topic3)))

    opr_funcs = {"and": and_, "or": or_}

    def get_operator(opr_key: str, default_opr: str = "and"):
        # Unknown operator strings silently fall back to AND.
        return opr_funcs.get(opr_key, opr_funcs[default_opr])

    opr_mapping = {
        ("topic0", "topic1"): topic0_1_opr,
        ("topic1", "topic2"): topic1_2_opr,
        ("topic2", "topic3"): topic2_3_opr,
        ("topic0", "topic2"): topic0_2_opr,
        ("topic1", "topic3"): topic1_3_opr,
        ("topic0", "topic3"): topic0_3_opr,
    }

    # Build the final condition by folding the provided topic filters with
    # the operator declared for each adjacent pair.
    # NOTE(review): when no topic is given, a literal Python True is passed to
    # .where(); SQLAlchemy accepts this but emits a coercion warning — confirm.
    final_condition = True
    if conditions:
        final_condition = conditions[0][1]
        for i in range(1, len(conditions)):
            prev_topic, current_topic = conditions[i - 1][0], conditions[i][0]
            opr_key = opr_mapping.get((prev_topic, current_topic), "and")
            opr_func = get_operator(opr_key)
            final_condition = opr_func(final_condition, conditions[i][1])

    if address:
        final_condition = and_(final_condition, Logs.address == hex_str_to_bytes(address))

    # Build and execute query
    query = select(Logs).where(final_condition, Logs.block_number >= from_block, Logs.block_number <= to_block)

    if sort_order == "asc":
        query = query.order_by(Logs.block_number.asc())
    else:
        query = query.order_by(Logs.block_number.desc())

    query = query.offset((page - 1) * offset).limit(offset)
    logs = session.exec(query).all()

    # Format results
    result = [
        APILogResponse(
            transactionHash=bytes_to_hex_str(log.transaction_hash) or "",
            logIndex=str(log.log_index),
            address=bytes_to_hex_str(log.address) or "",
            data=bytes_to_hex_str(log.data) or "",
            blockNumber=str(log.block_number),
            timeStamp=log.block_timestamp.strftime("%s"),
            topics=[
                topic
                for topic in [
                    bytes_to_hex_str(log.topic0),
                    bytes_to_hex_str(log.topic1),
                    bytes_to_hex_str(log.topic2),
                    bytes_to_hex_str(log.topic3),
                ]
                if topic is not None
            ],
        )
        for log in logs
    ]

    return result


# (P0)Get ERC20-Token TotalSupply by ContractAddress
# (P0)Get ERC721-Token TotalSupply by ContractAddress
# (P0)Get ERC1155-Token TotalSupply by ContractAddress
def stats_token_supply(session: Session, contract_address: str) -> Optional[int]:
    """Return the recorded total supply for a token contract, or None."""
    total_supply = session.exec(
        select(Tokens.total_supply).where(Tokens.address == hex_str_to_bytes(contract_address))
    ).first()

    if not total_supply:
        return None
    else:
        return int(total_supply)


# Get ERC20-Token Account Balance for TokenContractAddress
# Get ERC721-Token Account Balance for TokenContractAddress
# Get ERC1155-Token Account Balance for TokenContractAddress
def account_token_balance(
    session: Session, contract_address: str, address: str, token_type: TokenType = TokenType.ERC20, token_id: int = -1
) -> Optional[str]:
    """Return the current token balance of *address* for a token contract.

    ERC20/ERC721 balances come from CurrentTokenBalances; ERC1155 balances
    are per token_id (CurrentTokenIdBalances). Missing inputs, unknown
    token types or no record all yield "0".
    """
    if not address or not contract_address:
        return "0"
    if token_type in [TokenType.ERC20, TokenType.ERC721]:
        token_balance = session.exec(
            select(CurrentTokenBalances.balance).where(
                CurrentTokenBalances.address == hex_str_to_bytes(address),
                CurrentTokenBalances.token_address == hex_str_to_bytes(contract_address),
            )
        ).first()
    elif token_type == TokenType.ERC1155:
        token_balance = session.exec(
            select(CurrentTokenIdBalances.balance).where(
                CurrentTokenIdBalances.address == hex_str_to_bytes(address),
                CurrentTokenIdBalances.token_address == hex_str_to_bytes(contract_address),
                CurrentTokenIdBalances.token_id == token_id,
            )
        ).first()
    else:
        token_balance = 0

    return str(token_balance or 0)


# Get Historical ERC20-Token Account Balance for TokenContractAddress by BlockNo
# Get Historical ERC721-Token Account Balance for TokenContractAddress by BlockNo
# Get Historical ERC1155-Token Account Balance for TokenContractAddress by BlockNo
def account_token_balance_with_block_number(
    session: Session,
    contract_address: str,
    address: str,
    block_number: int,
    token_type: TokenType = TokenType.ERC20,
    token_id: int = -1,
) -> str:
    """Return the token balance of *address* as of *block_number*.

    Same semantics as account_token_balance but queries the historical
    (per-block) balance tables, taking the newest row at or before
    *block_number*. Falsy inputs (including block_number == 0) yield "0".
    """
    if not address or not contract_address or not block_number:
        return "0"
    if token_type in [TokenType.ERC20, TokenType.ERC721]:
        token_balance = session.exec(
            select(AddressTokenBalances.balance)
            .where(
                and_(
                    AddressTokenBalances.address == hex_str_to_bytes(address),
                    AddressTokenBalances.token_address == hex_str_to_bytes(contract_address),
                    AddressTokenBalances.block_number <= block_number,
                )
            )
            .order_by(AddressTokenBalances.block_number.desc())
        ).first()
    elif token_type == TokenType.ERC1155:
        token_balance = session.exec(
            select(AddressTokenIdBalances.balance)
            .where(
                and_(
                    AddressTokenIdBalances.address == hex_str_to_bytes(address),
                    AddressTokenIdBalances.token_address == hex_str_to_bytes(contract_address),
                    AddressTokenIdBalances.block_number <= block_number,
                    AddressTokenIdBalances.token_id == token_id,
                )
            )
            .order_by(AddressTokenIdBalances.block_number.desc())
        ).first()

    else:
        token_balance = 0

    return str(token_balance or 0)


class TokenHolderResponse(BaseModel):
    """One holder row for a token contract (Etherscan naming)."""

    TokenHolderAddress: str
    TokenId: Optional[str]
    TokenHolderQuantity: str


# (P0)Get Token Holder List by Contract Address
def token_holder_list(
    session: Session, contract_address: str, page: int, offset: int, sort_order: str
) -> Optional[List[TokenHolderResponse]]:
    """Return paged holders of a token contract ordered by balance.

    Returns None when the contract is not a known token.
    """
    token = session.exec(select(Tokens).where(Tokens.address == hex_str_to_bytes(contract_address))).first()

    if token is None:
        return None

    query = select(CurrentTokenBalances).where(CurrentTokenBalances.token_address == hex_str_to_bytes(contract_address))

    if sort_order == "asc":
        query = query.order_by(CurrentTokenBalances.balance.asc())
    else:
        query = query.order_by(CurrentTokenBalances.balance.desc())

    query = query.offset((page - 1) * offset).limit(offset)
    token_holders = session.exec(query).all()

    return [
        TokenHolderResponse(
            TokenHolderAddress=bytes_to_hex_str(token_holder.address),
            TokenId=None,  # per-token-id holders are not broken out here
            TokenHolderQuantity=str(token_holder.balance),
        )
        for token_holder in token_holders
    ]


class TokenInfoResponse(BaseModel):
    """Basic token metadata (Etherscan naming)."""

    TokenName: str
    TokenSymbol: str
    TokenTotalSupply: str
    TokenType: str
    TokenDecimals: Optional[str] = None


# Get Token Info by ContractAddress
def token_info(session: Session, contract_address: str) -> Optional[TokenInfoResponse]:
    """Return token metadata for a contract address, or None if unknown.

    NOTE(review): Tokens.name/symbol may plausibly be NULL for unparsed
    tokens, which would fail validation of the str-typed fields — confirm
    against the schema (other code here defaults to "Unknown Token").
    """
    query = select(Tokens).where(Tokens.address == hex_str_to_bytes(contract_address))
    token = session.exec(query).first()

    if token:
        return TokenInfoResponse(
            TokenName=token.name,
            TokenSymbol=token.symbol,
            TokenTotalSupply=str(token.total_supply),
            TokenType=token.token_type,
            TokenDecimals=str(token.decimals) if token.decimals else None,
        )
    else:
        return None


class NftInventory(BaseModel):
    """One owned NFT token id."""

    tokenID: str


# Get Address ERC721 Token Inventory By Contract Address
def account_address_nft_inventory(session: Session, contract_address, address, page, offset) -> List[NftInventory]:
    """Return the paged list of NFT token ids owned by *address* in a collection."""
    if not address or not contract_address:
        return []
    address = address.lower()
    contract_address = contract_address.lower()
    query = (
        select(NFTDetails)
        .where(
            NFTDetails.token_address == hex_str_to_bytes(contract_address),
            NFTDetails.token_owner == hex_str_to_bytes(address),
        )
        .order_by(NFTDetails.token_id.asc())
        .limit(offset)
        .offset((page - 1) * offset)
    )
    result = session.exec(query)
    result = [
        NftInventory(
            tokenID=str(token.token_id),
        )
        for token in result
    ]
    return result


# Get Contract Creator and Creation Tx Hash
class ContractInfo(BaseModel):
    """Creator and creation transaction for a contract."""

    contractAddress: str
    contractCreator: str
    txHash: str


def get_contract_creator_and_creation_tx_hash(session: Session, contract_addresses) -> List[ContractInfo]:
    """Return creator address and creation tx hash for each known contract."""
    if not contract_addresses:
        return []
    contract_addresses = [hex_str_to_bytes(address) for address in contract_addresses]
    query = select(Contracts).where(Contracts.address.in_(contract_addresses))
    contracts = session.exec(query).all()
    result = [
        ContractInfo(
            contractAddress=bytes_to_hex_str(contract.address),
            contractCreator=bytes_to_hex_str(contract.contract_creator),
            txHash=bytes_to_hex_str(contract.transaction_hash),
        )
        for contract in contracts
    ]
    return result


# (P0)Get Address ERC20 Token Holding
# (P0)Get Address ERC721 Token Holding
# (P0)Get Address ERC1155 Token Holding


class AddressTokenQuantity(BaseModel):
    """One token position held by an address."""

    TokenAddress: str
    TokenName: str
    TokenType: str
    TokenSymbol: str
    TokenQuantity: str

    TokenDecimals: Optional[str]
    TokenID: Optional[str]


def account_address_token_holding(
    session: Session, address: str, page: int, offset: int, token_type: Optional[TokenType] = None
) -> List[AddressTokenQuantity]:
    """Return paged token holdings of *address*, optionally filtered by type.

    Joins balances with token metadata; missing metadata falls back to
    "Unknown Token"/"UNKNOWN". TokenQuantity is the raw (undivided) balance.
    """
    if address is None:
        return []
    # Main query with joins
    query = (
        select(
            CurrentTokenBalances.token_address,
            CurrentTokenBalances.balance,
            Tokens.name,
            Tokens.token_type,
            Tokens.symbol,
            Tokens.icon_url,
            Tokens.decimals,
        )
        .outerjoin(
            Tokens,
            and_(
                CurrentTokenBalances.token_address == Tokens.address,
            ),
        )
        .order_by(CurrentTokenBalances.token_address.asc())
    )
    query = query.where(CurrentTokenBalances.address == hex_str_to_bytes(address))
    if token_type:
        query = query.where(Tokens.token_type == token_type.value)
    result = session.exec(query.offset((page - 1) * offset).limit(offset)).all()

    token_holder_list = []
    for token_holder in result:
        token_holder_list.append(
            AddressTokenQuantity(
                TokenAddress=bytes_to_hex_str(token_holder.token_address),
                TokenName=token_holder.name or "Unknown Token",
                TokenType=token_holder.token_type,
                TokenSymbol=token_holder.symbol or "UNKNOWN",
                # TokenQuantity=str(token_holder.balance / (10**token_holder.decimals or 0)),
                TokenQuantity=str(token_holder.balance or 0),
                TokenDecimals=str(token_holder.decimals) if token_holder.decimals else None,
                TokenID=None,
            )
        )
    return token_holder_list


# Get Block Number by Timestamp
def block_number_by_timestamp(session: Session, timestamp: int, closest) -> Optional[int]:
    """Return the block number closest to *timestamp*.

    *closest* must be "before" (newest block at/under the timestamp) or
    "after" (oldest block at/over it); any other value returns None.
    NOTE(review): datetime.fromtimestamp uses the server's local timezone;
    if Blocks.timestamp is stored in UTC this is off by the UTC offset —
    confirm and consider fromtimestamp(ts, tz=timezone.utc).
    """
    if closest == "before":
        block_number = session.exec(
            select(Blocks.number)
            .where(Blocks.timestamp <= datetime.fromtimestamp(timestamp))
            .order_by(Blocks.number.desc())
            .limit(1)
        ).first()
    elif closest == "after":
        block_number = session.exec(
            select(Blocks.number)
            .where(Blocks.timestamp >= datetime.fromtimestamp(timestamp))
            .order_by(Blocks.number.asc())
            .limit(1)
        ).first()
    else:
        return None
    return block_number


# Get Daily Network Transaction Fee
class DailyTransactionFeeResponse(BaseModel):
    """Daily average transaction fee datapoint."""

    UTCDate: str
    unixTimeStamp: str
    transactionFee: str


def stats_daily_network_transaction_fee(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyTransactionFeeResponse]:
    """Return per-day average transaction fee between the given dates."""
    query = select(
        DailyTransactionsStats.block_date,
        DailyTransactionsStats.avg_transaction_fee,
    ).where(and_(DailyTransactionsStats.block_date >= start_date, DailyTransactionsStats.block_date <= end_date))

    if sort_order == "asc":
        query = query.order_by(DailyTransactionsStats.block_date.asc())
    else:
        query = query.order_by(DailyTransactionsStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyTransactionFeeResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            transactionFee=str(row.avg_transaction_fee),
        )
        for row in results
    ]


# Get Daily New Address Count


class DailyNewAddressCountResponse(BaseModel):
    """Daily new-address count datapoint."""

    UTCDate: str
    unixTimeStamp: str
    newAddressCount: str


def stats_daily_new_address_count(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyNewAddressCountResponse]:
    """Return per-day new address counts between the given dates."""
    query = select(
        DailyAddressesStats.block_date,
        DailyAddressesStats.new_address_cnt,
    ).where(DailyAddressesStats.block_date >= start_date, DailyAddressesStats.block_date <= end_date)

    if sort_order == "asc":
        query = query.order_by(DailyAddressesStats.block_date.asc())
    else:
        query = query.order_by(DailyAddressesStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyNewAddressCountResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            newAddressCount=str(row.new_address_cnt),
        )
        for row in results
    ]


# Get Daily Network Utilization
class DailyNetworkUtilizationResponse(BaseModel):
    """Daily average gas-used-percentage datapoint."""

    UTCDate: str
    unixTimeStamp: str
    networkUtilization: str


def stats_daily_network_utilization(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyNetworkUtilizationResponse]:
    """Return per-day average gas utilization between the given dates."""
    query = select(
        DailyBlocksStats.block_date,
        DailyBlocksStats.avg_gas_used_percentage,
    ).where(and_(DailyBlocksStats.block_date >= start_date, DailyBlocksStats.block_date <= end_date))

    if sort_order == "asc":
        query = query.order_by(DailyBlocksStats.block_date.asc())
    else:
        query = query.order_by(DailyBlocksStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyNetworkUtilizationResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            networkUtilization=str(row.avg_gas_used_percentage),
        )
        for row in results
    ]


# Get Daily Transaction Count
class DailyTransactionCountResponse(BaseModel):
    """Daily transaction count datapoint."""

    UTCDate: str
    unixTimeStamp: str
    transactionCount: str


def stats_daily_transaction_count(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyTransactionCountResponse]:
    """Return per-day transaction counts between the given dates."""
    query = select(DailyTransactionsStats.block_date, DailyTransactionsStats.cnt).where(
        DailyTransactionsStats.block_date >= start_date, DailyTransactionsStats.block_date <= end_date
    )

    if sort_order == "asc":
        query = query.order_by(DailyTransactionsStats.block_date.asc())
    else:
        query = query.order_by(DailyTransactionsStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyTransactionCountResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            transactionCount=str(row.cnt),
        )
        for row in results
    ]


class DailyAverageBlockSizeResponse(BaseModel):
    """Daily average block size datapoint."""

    UTCDate: str
    unixTimeStamp: str
    averageBlockSize: str


def stats_daily_average_block_size(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyAverageBlockSizeResponse]:
    """Return per-day average block size between the given dates."""
    query = select(DailyBlocksStats.block_date, DailyBlocksStats.avg_size).where(
        DailyBlocksStats.block_date >= start_date, DailyBlocksStats.block_date <= end_date
    )

    if sort_order == "asc":
        query = query.order_by(DailyBlocksStats.block_date.asc())
    else:
        query = query.order_by(DailyBlocksStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyAverageBlockSizeResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            averageBlockSize=str(row.avg_size),
        )
        for row in results
    ]


# Get Daily Block Count and Rewards
class DailyBlockCountAndRewardsResponse(BaseModel):
    """Daily block count datapoint (rewards not included yet)."""

    UTCDate: str
    unixTimeStamp: str
    blockCount: str


def stats_daily_block_count_and_rewards(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyBlockCountAndRewardsResponse]:
    """Return per-day block counts between the given dates."""
    query = select(DailyBlocksStats.block_date, DailyBlocksStats.cnt).where(
        and_(DailyBlocksStats.block_date >= start_date, DailyBlocksStats.block_date <= end_date)
    )

    if sort_order == "asc":
        query = query.order_by(DailyBlocksStats.block_date.asc())
    else:
        query = query.order_by(DailyBlocksStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyBlockCountAndRewardsResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            blockCount=str(row.cnt),
        )
        for row in results
    ]


# Get Daily Average Time for A Block to be Included in the Ethereum Blockchain
class DailyAverageBlockTimeResponse(BaseModel):
    """Daily average block interval datapoint."""

    UTCDate: str
    unixTimeStamp: str
    blockTime: str


def stats_daily_average_block_time(
    session: Session, start_date: datetime, end_date: datetime, sort_order: str
) -> List[DailyAverageBlockTimeResponse]:
    """Return per-day average block interval between the given dates."""
    query = select(DailyBlocksStats.block_date, DailyBlocksStats.block_interval).where(
        and_(DailyBlocksStats.block_date >= start_date, DailyBlocksStats.block_date <= end_date)
    )

    if sort_order == "asc":
        query = query.order_by(DailyBlocksStats.block_date.asc())
    else:
        query = query.order_by(DailyBlocksStats.block_date.desc())

    results = session.exec(query).all()

    return [
        DailyAverageBlockTimeResponse(
            UTCDate=row.block_date.strftime("%Y-%m-%d"),
            unixTimeStamp=row.block_date.strftime("%s"),
            blockTime=str(row.block_interval),
        )
        for row in results
    ]


# Get Contract ABI for Verified Contract Source Codes
def get_contract_abi(session: Session, contract_address):
    """Return the verified ABI for *contract_address*, or None.

    NOTE(review): get_contract_verification_abi_by_address is neither
    defined nor imported in this module — calling this raises NameError
    as written. Confirm the intended import.
    """
    if not contract_address:
        return None
    contract = get_contract_verification_abi_by_address(contract_address)
    if contract:
        return contract.get("abi")
    else:
        return None


class ContractInfoResponse(BaseModel):
    """Etherscan getsourcecode-shaped verified-contract payload."""

    SourceCode: str
    ABI: str
    ContractName: str
    CompilerVersion: str
    OptimizationUsed: str
    Runs: str
    ConstructorArguments: str
    EVMVersion: str
    Library: str
    LicenseType: str
    Proxy: str
    Implementation: str


# Get Contract Source for Verified Contract Source Codes
def get_contract_source_code(session: Session, contract_address: str):
    """Return verified source + metadata for a contract, or None.

    Looks the contract up locally, then fetches verification metadata and
    source files from object storage, rewriting file keys through the
    compiler's remappings so sources resolve as the compiler saw them.
    NOTE(review): get_contract_code_by_address, aws_service and CHAIN_ID
    are not defined or imported in this module — NameError at call time
    as written. Confirm the intended imports.
    """
    if not contract_address:
        return None

    contract_address = contract_address.lower()

    # Query the contract from the database
    contract = session.exec(select(Contracts).where(Contracts.address == hex_str_to_bytes(contract_address))).first()

    if not contract or not contract.is_verified:
        return None

    # Fetch contract verification details
    contracts_verification = get_contract_code_by_address(address=contract_address)
    if not contracts_verification:
        return None

    source_code = {}
    code = {}

    # Check if the folder path is available for contract verification
    if "folder_path" in contracts_verification and len(contracts_verification["folder_path"]) > 0:
        if len(contracts_verification["folder_path"]) == 1:
            source_code = aws_service.get_file_content(
                "contract-verify-files", contracts_verification["folder_path"][0]
            )
        else:
            settings = json.loads(contracts_verification.get("settings", "{}"))
            source_code["settings"] = settings

            # Longest-prefix-first remapping so more specific paths win.
            remappings = settings.get("remappings", [])
            remappings_dict = {"src/": ""}
            for remapping_str in remappings:
                remapping = remapping_str.split("=")
                if len(remapping) == 2:
                    remappings_dict[remapping[1]] = remapping[0]

            sorted_remappings = sorted(remappings_dict.items(), key=lambda x: len(x[0]), reverse=True)

            for file in contracts_verification["folder_path"]:
                content = aws_service.get_file_content("contract-verify-files", file)
                if content:
                    key = file.removeprefix(f"{CHAIN_ID}/{contract_address}/")
                    for remap_key, remap_value in sorted_remappings:
                        if key.startswith(remap_key):
                            key = key.replace(remap_key, remap_value)
                            break
                    code[key] = {"content": content}

            source_code["sources"] = code
            source_code["language"] = contracts_verification.get("language", "Solidity")

    contracts_verification["files"] = []

    # Construct the output data to return using ContractInfoResponse model
    output_data = [
        ContractInfoResponse(
            SourceCode=str(source_code),
            ABI=contracts_verification.get("abi") or "",
            ContractName=contracts_verification.get("contract_name") or "",
            CompilerVersion=contracts_verification.get("compiler_version") or "",
            OptimizationUsed="1" if contracts_verification.get("optimization_used") else "0",
            Runs=str(contracts_verification.get("optimization_runs") or ""),
            ConstructorArguments=contracts_verification.get("constructor_arguments") or "",
            EVMVersion=contracts_verification.get("evm_version") or "",
            Library=(
                ",".join(contracts_verification.get("libraries", [])) if contracts_verification.get("libraries") else ""
            ),
            LicenseType=contracts_verification.get("license_type") or "",
            Proxy="1" if contracts_verification.get("proxy") else "0",
            Implementation=contracts_verification.get("implementation") or "",
        )
    ]

    return output_data


# Get Historical ERC20-Token TotalSupply by ContractAddress & BlockNo (TODO)
# Get Address ERC721 Token Inventory By Contract Address (TODO)
# Get Daily Average Network Hash Rate (TODO)
(TODO) +# Get Daily Average Network Difficulty (TODO) +# Get Ether Historical Daily Market Cap (TODO) +# Get Ether Historical Price (TODO) diff --git a/hemera/app/api/routes/developer/es_adapter/router.py b/hemera/app/api/routes/developer/es_adapter/router.py new file mode 100644 index 000000000..69519e487 --- /dev/null +++ b/hemera/app/api/routes/developer/es_adapter/router.py @@ -0,0 +1,323 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/5 01:07 +# @Author ideal93 +# @File route.py +# @Brief + +from enum import Enum +from typing import Dict, Set + +from fastapi import APIRouter, Depends, HTTPException +from pydantic import conint, constr, model_validator, validator + +from hemera.app.api.deps import ReadSessionDep +from hemera.app.api.routes.developer.es_adapter.helper import * + +router = APIRouter(tags=["DEVELOPER"]) + + +def limit_address_validator(value: Optional[str]) -> Optional[str]: + if value is None: + return value + if not isinstance(value, str): + raise ValueError("Invalid contract address format, must be a string.") + if len(value) != 42: + raise ValueError("Invalid contract address format, must be 42 characters long.") + if not value.startswith("0x"): + raise ValueError("Invalid contract address format, must start with '0x'.") + if not all(c in "0123456789abcdefABCDEF" for c in value[2:]): + raise ValueError("Invalid contract address format, must contain only hexadecimal characters.") + return value.lower() + + +def limit_hash_validator(value: Optional[str]) -> Optional[str]: + if value is None: + return value + if not isinstance(value, str): + raise ValueError("Invalid hash format, must be a string.") + if len(value) != 66: + raise ValueError("Invalid hash format, must be 66 characters long.") + if not value.startswith("0x"): + raise ValueError("Invalid hash format, must start with '0x'.") + if not all(c in "0123456789abcdefABCDEF" for c in value[2:]): + raise ValueError("Invalid hash format, must contain only hexadecimal 
characters.") + return value.lower() + + +class ModuleEnum(str, Enum): + account = "account" + contract = "contract" + transaction = "transaction" + block = "block" + token = "token" + logs = "logs" + stats = "stats" + + +ALLOWED_ACTIONS: Dict[ModuleEnum, Set[str]] = { + ModuleEnum.account: { + "balance", + "balancemulti", + "balancehistory", + "txlist", + "txlistinternal", + "tokentx", + "tokennfttx", + "tokenbalance", + "tokenbalancehistory", + "addresstokenbalance", + "addresstokennftbalance", + "addresstoken1155balance", + "addresstokennftinventory", + }, + ModuleEnum.transaction: {"getstatus", "gettxreceiptstatus"}, + ModuleEnum.logs: {"getLogs"}, + ModuleEnum.stats: { + "tokensupply", + "tokennftsupply", + "token1155supply", + "dailyavgblocktime", + "dailyblkcount", + "dailytxnfee", + "dailynewaddress", + "dailynetutilization", + "dailytx", + "dailyavgblocksize", + }, + ModuleEnum.token: {"tokenholderlist", "tokeninfo"}, + ModuleEnum.block: {"getblocknobytime"}, + ModuleEnum.contract: {"getabi", "getcontractcreation", "getsourcecode"}, +} + + +class DeveloperAPIRequest(BaseModel): + module: ModuleEnum = Field(None, description="Module to query", example=ModuleEnum.account) + action: Optional[str] = None + tag: Optional[str] = None + startblock: int = 0 + endblock: int = 99999999 + fromBlock: int = 0 + toBlock: int = 99999999 + startdate: Optional[datetime] = None + enddate: Optional[datetime] = None + tokenid: int = -1 + blockno: int = 0 + timestamp: int = 0 + closest: constr(pattern=r"^(before|after)$") = "before" + page: conint(ge=1, le=1000) = 1 + offset: conint(ge=1, le=100) = 5 + sort: constr(pattern=r"^(asc|desc)$") = "asc" + address: Optional[str] = None + contractaddress: Optional[str] = None + contractaddresses: Optional[List[str]] = None + txhash: Optional[str] = None + topic0: Optional[str] = None + topic1: Optional[str] = None + topic2: Optional[str] = None + topic3: Optional[str] = None + topic0_1_opr: constr(pattern=r"^(and|or)$") = "and" + 
topic1_2_opr: constr(pattern=r"^(and|or)$") = "and" + topic2_3_opr: constr(pattern=r"^(and|or)$") = "and" + topic0_2_opr: constr(pattern=r"^(and|or)$") = "and" + topic1_3_opr: constr(pattern=r"^(and|or)$") = "and" + topic0_3_opr: constr(pattern=r"^(and|or)$") = "and" + token_type: Optional[TokenType] = Field(None, description="Token type", example=TokenType.ERC20) + + @validator("txhash", pre=True, always=True) + def validate_txhash(cls, value): + return limit_hash_validator(value) + + @validator("address", "contractaddress", pre=True, always=True) + def validate_address(cls, value): + return limit_address_validator(value) + + @validator("contractaddresses", pre=True) + def split_and_validate_addresses(cls, value): + if value is None: + return value + if isinstance(value, str): + addresses = value.split(",") + elif isinstance(value, list): + addresses = value + else: + raise ValueError("contractaddresses must be a comma separated string or a list of addresses") + return [limit_address_validator(addr.strip()) for addr in addresses] + + @model_validator(mode="after") + def check_module_action(self) -> "DeveloperAPIRequest": + # Here, self is the model instance after individual validations have passed. 
+ if self.module is None: + raise ValueError("The 'module' field is required") + if self.action is None: + raise ValueError("The 'action' field is required") + allowed = ALLOWED_ACTIONS.get(self.module) + if allowed is None: + raise ValueError(f"No action list defined for module '{self.module}'") + if self.action not in allowed: + raise ValueError(f"For module '{self.module}', the action must be one of {allowed}") + return self + + +@router.get("/v1/developer/api") +async def developer_api(session: ReadSessionDep, request: DeveloperAPIRequest = Depends()): + module = request.module + action = request.action + + if module == ModuleEnum.account: + if action == "balance": + result = account_balance(session, address=request.address) + elif action == "balancemulti": + result = account_balancemulti(session, addresses=request.contractaddresses) + elif action == "balancehistory": + result = account_balancehistory(session, address=request.address, blockno=request.blockno) + elif action == "txlist": + result = account_txlist( + session, + txhash=request.txhash, + address=request.address, + start_block=request.startblock, + end_block=request.endblock, + page=request.page, + offset=request.offset, + sort_order=request.sort, + ) + elif action == "txlistinternal": + result = account_txlistinternal( + session, + txhash=request.txhash, + address=request.address, + start_block=request.startblock, + end_block=request.endblock, + page=request.page, + offset=request.offset, + sort_order=request.sort, + ) + elif action == "tokentx": + result = get_account_token_transfers( + session, + contract_address=request.contractaddress, + address=request.address, + page=request.page, + offset=request.offset, + sort_order=request.sort, + start_block=request.startblock, + end_block=request.endblock, + token_type=request.token_type, + ) + elif action == "tokenbalance": + result = account_token_balance( + session, + contract_address=request.contractaddress, + address=request.address, + 
token_type=request.token_type, + token_id=request.tokenid, + ) + elif action == "addresstokenbalance": + result = account_address_token_holding( + session, + address=request.address, + page=request.page, + offset=request.offset, + token_type=request.token_type, + ) + elif action == "addresstokennftinventory": + result = account_address_nft_inventory( + session, + address=request.address, + contract_address=request.contractaddress, + page=request.page, + offset=request.offset, + ) + else: + raise HTTPException(status_code=400, detail="Invalid action in account module") + + elif module == ModuleEnum.transaction: + if action == "getstatus": + result = check_contract_execution_status(session, txn_hash=request.txhash) + elif action == "gettxreceiptstatus": + result = check_transaction_receipt_status(session, txn_hash=request.txhash) + else: + raise HTTPException(status_code=400, detail="Invalid action in transaction module") + + elif module == ModuleEnum.logs: + if action == "getLogs": + result = get_event_logs( + session, + topic0=request.topic0, + topic1=request.topic1, + topic2=request.topic2, + topic3=request.topic3, + topic0_1_opr=request.topic0_1_opr, + topic1_2_opr=request.topic1_2_opr, + topic2_3_opr=request.topic2_3_opr, + topic0_2_opr=request.topic0_2_opr, + topic1_3_opr=request.topic1_3_opr, + topic0_3_opr=request.topic0_3_opr, + address=request.address, + from_block=request.fromBlock, + to_block=request.toBlock, + page=request.page, + offset=request.offset, + sort_order=request.sort, + ) + else: + raise HTTPException(status_code=400, detail="Invalid action in logs module") + + elif module == ModuleEnum.stats: + if action == "tokensupply": + result = stats_token_supply(session, contract_address=request.contractaddress) + elif action == "dailytxnfee": + result = stats_daily_network_transaction_fee( + session, start_date=request.startdate, end_date=request.enddate, sort_order=request.sort + ) + elif action == "dailynewaddress": + result = 
stats_daily_new_address_count( + session, start_date=request.startdate, end_date=request.enddate, sort_order=request.sort + ) + elif action == "dailytx": + result = stats_daily_transaction_count( + session, start_date=request.startdate, end_date=request.enddate, sort_order=request.sort + ) + elif action == "dailynetutilization": + result = stats_daily_network_utilization( + session, start_date=request.startdate, end_date=request.enddate, sort_order=request.sort + ) + else: + raise HTTPException(status_code=400, detail="Invalid action in stats module") + + elif module == ModuleEnum.token: + if action == "tokeninfo": + result = token_info(session, contract_address=request.contractaddress) + elif action == "tokenholderlist": + result = token_holder_list( + session, + contract_address=request.contractaddress, + page=request.page, + offset=request.offset, + sort_order=request.sort, + ) + else: + raise HTTPException(status_code=400, detail="Invalid action in token module") + + elif module == ModuleEnum.block: + if action == "getblocknobytime": + result = block_number_by_timestamp(session, timestamp=request.timestamp, closest=request.closest) + else: + raise HTTPException(status_code=400, detail="Invalid action in block module") + + elif module == ModuleEnum.contract: + if action == "getcontractcreation": + result = get_contract_creator_and_creation_tx_hash(session, contract_addresses=request.contractaddresses) + # elif action == "getabi": + # result = get_contract_abi(session, contract_address=request.address) + # elif action == "getsourcecode": + # result = get_contract_source_code(session, contract_address=request.address) + else: + raise HTTPException(status_code=400, detail="Invalid action in contract module") + else: + raise HTTPException(status_code=400, detail="Invalid module") + + if not result: + return {"status": "0", "message": "No data found"} + return {"status": "1", "message": "OK", "result": result} diff --git a/hemera/app/api/routes/enricher/__init__.py 
b/hemera/app/api/routes/enricher/__init__.py new file mode 100644 index 000000000..b099ab2e2 --- /dev/null +++ b/hemera/app/api/routes/enricher/__init__.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/20 15:29 +# @Author ideal93 +# @File __init__.py.py +# @Brief +from typing import Annotated + +from fastapi import Depends +from sqlmodel import Session + +from hemera.app.api.deps import get_read_db +from hemera.app.api.routes.enricher.address_enricher import Address, AddressEnricher, EnricherManager, EnricherType + + +def get_blockchain_enricher() -> AddressEnricher: + return EnricherManager.get_instance().get_enricher() + + +BlockchainEnricherDep = Annotated[AddressEnricher, Depends(get_blockchain_enricher)] diff --git a/hemera/app/api/routes/enricher/address_enricher.py b/hemera/app/api/routes/enricher/address_enricher.py new file mode 100644 index 000000000..66939a85e --- /dev/null +++ b/hemera/app/api/routes/enricher/address_enricher.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/20 15:29 +# @Author ideal93 +# @File address_enricher.py +# @Brief +from datetime import datetime +from enum import Enum +from typing import Any, Callable, Dict, List, Optional, Type, TypeVar + +from pydantic import BaseModel +from sqlmodel import Session + +from hemera.app.api.routes.helper.contract import _get_contracts_by_addresses +from hemera.app.api.routes.helper.format import format_coin_value, format_dollar_value +from hemera.app.api.routes.helper.token import TokenInfo, get_coin_prices, get_token_map +from hemera.common.utils.format_utils import bytes_to_hex_str + + +class ExtraInfo(BaseModel): + ens: Optional[str] = None + tvl: Optional[float] = None + + +class Address(BaseModel): + address: str = "" + is_contract: bool = False + name: Optional[str] = None + extra_info: ExtraInfo = ExtraInfo() + + +class AddressEnricherServiceType(Enum): + ENS = "ens" + CONTRACT = "contract" + + +class 
EnricherType(Enum): + ADDRESS = "address" + COIN_VALUE = "coin_value" + COIN_PRICE = "coin_price" + TOKEN_INFO = "token_info" + TOKEN_VALUE = "token_value" + + +# Contract +# ENS(Service) +# Contract(Service) + + +class AddressEnricher: + """Class to enrich address data with additional information""" + + def __init__(self, services_list: List[AddressEnricherServiceType] = []): + self.services_list = services_list + self.coin_decimals = 18 + + def get_address(self, session: Session, addresses: set[str]) -> Dict[str, Address]: + enriched_address_dict = {address: Address(address=address) for address in addresses} + contracts = _get_contracts_by_addresses(session, list(addresses)) + contracts_dict = {bytes_to_hex_str(contract.address): contract for contract in contracts} + + for address in enriched_address_dict: + if address in contracts_dict: + enriched_address_dict[address].is_contract = True + enriched_address_dict[address].name = contracts_dict[address].name + + return enriched_address_dict + + def get_token_address(self, session: Session, addresses: set[str]) -> Dict[str, TokenInfo]: + return get_token_map(session, list(addresses)) + + def get_coin_price(self, session: Session, block_dates: set[datetime]) -> Dict[datetime, float]: + prices = get_coin_prices(session, list(block_dates)) + return {price.block_date: price.price for price in prices} + + def enrich( + self, items: List[Dict[str, Any]], fields_mapper: Dict[EnricherType, Dict[str, str]], session: Session + ) -> List[Dict[str, Any]]: + + for enricher_type in fields_mapper: + if enricher_type == EnricherType.ADDRESS: + items = self.enrich_address(items, fields_mapper[enricher_type], session) + elif enricher_type == EnricherType.COIN_VALUE: + items = self.enrich_coin_value(items, fields_mapper[enricher_type]) + elif enricher_type == EnricherType.COIN_PRICE: + items = self.enrich_coin_price(items, fields_mapper[enricher_type], session) + elif enricher_type == EnricherType.TOKEN_INFO: + items = 
self.enrich_token_info(items, fields_mapper[enricher_type], session) + elif enricher_type == EnricherType.TOKEN_VALUE: + items = self.enrich_token_value(items, fields_mapper[enricher_type]) + else: + pass + return items + + def enrich_address( + self, items: List[Dict[str, Any]], fields_mapper: Dict[str, str], session: Session + ) -> List[Dict[str, Any]]: + addresses = set() + for item in items: + for field in fields_mapper: + addresses.add(item[field]) + + enriched_address_dict = self.get_address(session, addresses) + + enriched_items = [] + for item in items: + for field in fields_mapper: + item[fields_mapper[field]] = enriched_address_dict.get(item[field]) + enriched_items.append(item) + return enriched_items + + def enrich_coin_value(self, items: List[Dict[str, Any]], fields_mapper: Dict[str, str]) -> List[Dict[str, Any]]: + for item in items: + for field in fields_mapper: + item[fields_mapper[field]] = format_coin_value(item[field]) + return items + + def enrich_coin_price( + self, items: List[Dict[str, Any]], fields_mapper: Dict[str, str], session: Session + ) -> List[Dict[str, Any]]: + block_dates = set() + for item in items: + block_dates.add(item["block_timestamp"].replace(second=0, microsecond=0)) + price_map = self.get_coin_price(session, block_dates) + + for item in items: + coin_price = price_map.get(item["block_timestamp"].replace(second=0, microsecond=0), 0.0) + for field in fields_mapper: + item[fields_mapper[field]] = format_dollar_value(coin_price * float(item[field])) + return items + + def enrich_token_info( + self, items: List[Dict[str, Any]], fields_mapper: Dict[str, str], session: Session + ) -> List[Dict[str, Any]]: + addresses = set() + for item in items: + for field in fields_mapper: + addresses.add(item[field]) + + token_info_dict = self.get_token_address(session, addresses) + + enriched_items = [] + for item in items: + for field in fields_mapper: + item[fields_mapper[field]] = token_info_dict.get(item[field]) + 
enriched_items.append(item) + return enriched_items + + def enrich_token_value(self, items: List[Dict[str, Any]], fields_mapper: Dict[str, str]) -> List[Dict[str, Any]]: + for item in items: + for field in fields_mapper: + item[fields_mapper[field]] = format_coin_value( + item[field], (item.get("token_info").decimals if item.get("token_info") else 18) or 18 + ) + return items + + +class EnricherManager: + _instance = None + _enricher = None + + @classmethod + def get_instance(cls) -> "EnricherManager": + if cls._instance is None: + cls._instance = cls() + return cls._instance + + def get_enricher(self) -> AddressEnricher: + if self._enricher is None: + self._enricher = AddressEnricher() + return self._enricher diff --git a/hemera/api/app/utils/__init__.py b/hemera/app/api/routes/explorer/__init__.py similarity index 100% rename from hemera/api/app/utils/__init__.py rename to hemera/app/api/routes/explorer/__init__.py diff --git a/hemera/app/api/routes/explorer/address.py b/hemera/app/api/routes/explorer/address.py new file mode 100644 index 000000000..4990cdf17 --- /dev/null +++ b/hemera/app/api/routes/explorer/address.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/20 15:01 +# @Author ideal93 +# @File address.py +# @Brief + +from typing import List, Optional + +from fastapi import APIRouter, Depends, HTTPException, Query +from pydantic import BaseModel + +from hemera.app.api.deps import ReadSessionDep +from hemera.app.api.routes.enricher import BlockchainEnricherDep, EnricherType +from hemera.app.api.routes.explorer.token import TokenTransferItem, TokenTransferResponse +from hemera.app.api.routes.explorer.transaction import ( + InternalTransactionItem, + InternalTransactionResponse, + LogResponse, + TransactionItem, + TransactionResponse, +) +from hemera.app.api.routes.helper.contract import ContractInfo as DBContractInfo +from hemera.app.api.routes.helper.contract import get_contract_by_address +from
hemera.app.api.routes.helper.format import format_coin_value, format_dollar_value +from hemera.app.api.routes.helper.internal_transaction import ( + get_internal_transactions_by_address, + get_internal_transactions_count_by_address, +) +from hemera.app.api.routes.helper.log import get_logs_by_address +from hemera.app.api.routes.helper.token import TokenInfo, get_latest_coin_price, get_token_info +from hemera.app.api.routes.helper.token_balances import TokenBalanceAbbr, get_address_token_balances +from hemera.app.api.routes.helper.token_transfers import get_token_transfers_by_address +from hemera.app.api.routes.helper.transaction import get_transactions_by_address, get_transactions_count_by_address +from hemera.app.api.routes.parameters.validate_address import internal_api_validate_address +from hemera.app.core.config import settings +from hemera.app.utils.web3_utils import get_balance + +router = APIRouter(tags=["ADDRESS"]) + + +class TokenHoldingItem(TokenBalanceAbbr): + display_value: str + token_info: TokenInfo + + +class TokenHoldingResponse(BaseModel): + data: List[TokenHoldingItem] + total: int + + +class ContractInfo(DBContractInfo): + similar_verified_addresses: List[str] = [] + + +class AddressProfileResponse(BaseModel): + balance: Optional[str] = "0" + native_token_price: Optional[str] = "0" + balance_usd: Optional[str] = "0" + + contract_info: Optional[ContractInfo] = None + token_info: Optional[TokenInfo] = None + + +@router.get("/v1/explorer/address/{address}/profile", response_model=AddressProfileResponse) +async def get_address_profile(session: ReadSessionDep, address: str = Depends(internal_api_validate_address)): + """Get address profile with balance, contract and token info.""" + + balance = get_balance(address) + native_token_price = get_latest_coin_price(session) + + contract_info = get_contract_by_address(session, address) + token_info = None + if contract_info: + token_info = get_token_info(session, address) + + return AddressProfileResponse( 
+ balance=format_coin_value(int(balance)), + native_token_price=format_dollar_value(native_token_price), + balance_usd=format_dollar_value((balance / 10**18) * native_token_price), + contract_info=contract_info, + token_info=token_info, + ) + + +@router.get("/v1/explorer/address/
{address}/token_holdings", response_model=TokenHoldingResponse) +@router.get("/v2/explorer/address/
{address}/token_holdings", response_model=TokenHoldingResponse) +async def get_address_token_holdings( + session: ReadSessionDep, enricher: BlockchainEnricherDep, address: str = Depends(internal_api_validate_address) +): + token_balances = get_address_token_balances(session, address) + enriched_token_balances = enricher.enrich( + [token_balance.dict() for token_balance in token_balances], + { + EnricherType.TOKEN_INFO: {"token_address": "token_info"}, + EnricherType.TOKEN_VALUE: {"balance": "display_value"}, + }, + session, + ) + return TokenHoldingResponse( + data=[TokenHoldingItem(**item) for item in enriched_token_balances], + total=len(token_balances), + ) + + +@router.get("/v1/explorer/address/
{address}/transactions", response_model=TransactionResponse) +async def get_address_transactions( + session: ReadSessionDep, + enricher: BlockchainEnricherDep, + address: str = Depends(internal_api_validate_address), + page: int = Query(1, gt=0), + size: int = Query(25, gt=0), +): + """Get transactions list with various filters and pagination. + + Args: + session: Database session + page: Page number, starting from 1 + size: Items per page + Returns: + TransactionResponse: Paginated transaction list with metadata + + Raises: + HTTPException: If page*size exceeds limits or invalid parameters + """ + # Check pagination limits + + # Get all transactions with pagination + + total_records = get_transactions_count_by_address(session, address, use_address_index=True) + transactions = get_transactions_by_address( + session, address, use_address_index=True, limit=size, offset=(page - 1) * size + ) + + enriched_transactions = enricher.enrich( + [transaction.dict() for transaction in transactions], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.COIN_VALUE: {"value": "display_value"}, + EnricherType.COIN_PRICE: {"transaction_fee": "transaction_fee_usd", "display_value": "value_usd"}, + }, + session, + ) + + return TransactionResponse( + data=[TransactionItem(**tx) for tx in enriched_transactions], + total=total_records, + max_display=min(settings.MAX_TRANSACTION_WITH_CONDITION, total_records), + page=page, + size=size, + ) + + +@router.get("/v1/explorer/address/
{address}/token_transfers", response_model=TokenTransferResponse) +async def get_address_token_transfers( + session: ReadSessionDep, + enricher: BlockchainEnricherDep, + address: str = Depends(internal_api_validate_address), + page: int = Query(1, gt=0), + size: int = Query(25, gt=0), +): + token_transfers = get_token_transfers_by_address( + session, address, limit=size, offset=(page - 1) * size, use_address_index=True + ) + enriched_token_transfers = enricher.enrich( + [token_transfer.dict() for token_transfer in token_transfers], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.TOKEN_INFO: {"token_address": "token_info"}, + }, + session, + ) + + return TokenTransferResponse( + total=len(enriched_token_transfers), data=[TokenTransferItem(**item) for item in enriched_token_transfers] + ) + + +@router.get("/v1/explorer/address/
{address}/internal_transactions", response_model=InternalTransactionResponse) +async def get_address_internal_transactions( + session: ReadSessionDep, + enricher: BlockchainEnricherDep, + address: str = Depends(internal_api_validate_address), + page: int = Query(1, gt=0), + size: int = Query(25, gt=0), +): + if page * size > settings.MAX_INTERNAL_TRANSACTION: + raise HTTPException( + status_code=400, + detail=f"Showing the last {settings.MAX_INTERNAL_TRANSACTION} records only", + ) + offset = (page - 1) * size + total_count = get_internal_transactions_count_by_address(session, address, use_address_index=True) + transactions = get_internal_transactions_by_address( + session, address, use_address_index=True, limit=size, offset=offset + ) + + enriched_transactions = enricher.enrich( + [transaction.dict() for transaction in transactions], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.COIN_VALUE: {"value": "display_value"}, + }, + session, + ) + data = [InternalTransactionItem(**tx) for tx in enriched_transactions] + + return InternalTransactionResponse( + data=data, + total=total_count, + max_display=min(total_count, settings.MAX_INTERNAL_TRANSACTION), + page=page, + size=size, + ) + + +@router.get("/v1/explorer/address/
{address}/logs", response_model=LogResponse) +async def get_address_logs(session: ReadSessionDep, address: str = Depends(internal_api_validate_address)): + logs = get_logs_by_address(session, address) + return LogResponse(total=len(logs), data=logs) diff --git a/hemera/app/api/routes/explorer/base.py b/hemera/app/api/routes/explorer/base.py new file mode 100644 index 000000000..e8afa5c11 --- /dev/null +++ b/hemera/app/api/routes/explorer/base.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/19 18:31 +# @Author ideal93 +# @File base.py +# @Brief + +from datetime import datetime, time, timedelta +from operator import or_ +from typing import List + +from fastapi import APIRouter, HTTPException, Query +from sqlmodel import select + +from hemera.app.api.deps import ReadSessionDep +from hemera.app.api.routes.helper.block import _get_last_block +from hemera.app.api.routes.helper.format import format_dollar_value +from hemera.app.api.routes.helper.stats import get_daily_transactions_cnt +from hemera.app.api.routes.helper.token import get_token_price +from hemera.app.api.routes.helper.transaction import get_total_txn_count, get_tps_latest_10min +from hemera.app.core.config import settings +from hemera.app.core.db import Database +from hemera.app.models import ( + AddressSearchResult, + BlockSearchResult, + ExplorerStats, + HealthCheckResponse, + SearchResult, + TokenSearchResult, + TransactionsDayResponse, + TransactionSearchResult, +) +from hemera.app.utils.web3_utils import get_gas_price +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.token.tokens import Tokens +from hemera.common.models.trace.contracts import Contracts +from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes +from hemera.common.utils.web3_utils import is_eth_address, is_eth_transaction_hash + +router = APIRouter(tags=["BASE"]) + + +def get_engines_status():
+ _to_status = lambda pool_status: { + "checked_in": pool_status.checked_in(), + "checked_out": pool_status.checked_out(), + "overflow": pool_status.overflow(), + "connections": pool_status.size(), + } + + return { + "engine_pool_status": Database.get_read_engine().pool.status(), + "read_pool_status": Database.get_read_engine().pool.status(), + "write_pool_status": Database.get_write_engine().pool.status(), + "common_pool_status": Database.get_common_engine().pool.status(), + } + + +@router.get("/v1/explorer/health", response_model=HealthCheckResponse) +async def health_check(session: ReadSessionDep): + block = _get_last_block(session=session, columns=["number", "timestamp"]) + if not block: + raise HTTPException(status_code=404, detail="No blocks found") + + return { + "latest_block_number": block.number, + "latest_block_timestamp": block.timestamp.isoformat(), + **get_engines_status(), + "status": "OK", + } + + +@router.get("/v1/explorer/stats", response_model=ExplorerStats) +async def get_stats(session: ReadSessionDep): + transaction_count = get_total_txn_count(session) + + latest_block = _get_last_block(session) + + if not latest_block: + raise HTTPException(status_code=404, detail="No blocks found") + + latest_block_number = latest_block.number + + earlier_block_number = max(latest_block_number - 5000, 1) + earlier_block = session.exec(select(Blocks).where(Blocks.number == earlier_block_number)).first() + + if not earlier_block: + earlier_block = latest_block + + avg_block_time = (latest_block.timestamp.timestamp() - earlier_block.timestamp.timestamp()) / ( + (latest_block_number - earlier_block_number) or 1 + ) + + transaction_tps = get_tps_latest_10min(session, latest_block.timestamp) + + latest_batch_number = 0 + + btc_price = get_token_price(session, "WBTC") + eth_price = get_token_price(session, "ETH") + eth_price_previous = get_token_price(session, "ETH", datetime.combine(datetime.now() - timedelta(days=1), time.min)) + + if 
settings.token_configuration.native_token == "ETH": + native_token_price = eth_price + native_token_price_previous = eth_price_previous + else: + native_token_price = get_token_price(session, settings.token_configuration.native_token) + native_token_price_previous = get_token_price( + session, + settings.token_configuration.native_token, + datetime.combine(datetime.now() - timedelta(days=1), time.min), + ) + + if settings.token_configuration.dashboard_token == settings.token_configuration.native_token: + dashboard_token_price = native_token_price + dashboard_token_price_previous = native_token_price_previous + else: + dashboard_token_price = get_token_price(session, settings.token_configuration.dashboard_token) + dashboard_token_price_previous = get_token_price( + session, + settings.token_configuration.dashboard_token, + datetime.combine(datetime.now() - timedelta(days=1), time.min), + ) + + return ExplorerStats( + total_transactions=transaction_count, + transaction_tps=round(transaction_tps, 2), + latest_batch=latest_batch_number, + latest_block=latest_block_number, + avg_block_time=avg_block_time, + eth_price=format_dollar_value(eth_price), + eth_price_btc=f"{eth_price / (btc_price or 1):.5f}", + eth_price_diff=f"{(eth_price - eth_price_previous) / (eth_price_previous or 1):.4f}", + native_token_price=format_dollar_value(native_token_price), + native_token_price_eth=f"{native_token_price / (eth_price or 1):.5f}", + native_token_price_diff=( + f"{(native_token_price - native_token_price_previous) / (native_token_price_previous or 1):.4f}" + if native_token_price_previous != 0 + else "0" + ), + dashboard_token_price_eth=f"{dashboard_token_price / (eth_price or 1):.5f}", + dashboard_token_price=format_dollar_value(dashboard_token_price), + dashboard_token_price_diff=( + f"{(dashboard_token_price - dashboard_token_price_previous) / (dashboard_token_price_previous or 1):.4f}" + if dashboard_token_price_previous != 0 + else "0" + ), + gas_fee=f"{get_gas_price() / 10 
@router.get("/v1/explorer/charts/transactions_per_day", response_model=TransactionsDayResponse)
async def get_transactions_per_day(session: ReadSessionDep):
    """Return the daily transaction counts for the most recent 14 days.

    Args:
        session: Read-only database session.

    Returns:
        Chart payload with a title and a list of ``{"value": ISO-date, "count": int}`` points.
    """
    results = get_daily_transactions_cnt(session, columns=[("block_date", "date"), "cnt"], limit=14)

    date_list = [{"value": item.date.isoformat(), "count": item.cnt} for item in results]

    return {"title": "Daily Transactions Chart", "data": date_list}


@router.get("/v1/explorer/search", response_model=List[SearchResult])
async def explorer_search(
    session: ReadSessionDep, q: str = Query(..., min_length=1, description="Search query")
) -> List[SearchResult]:
    """Search for blocks, addresses, transactions and tokens.

    The query is matched, in order, as: block number, wallet/contract address,
    transaction/block hash, and finally as a token name/symbol substring.
    The first category that matches short-circuits and is returned alone.
    """
    query_string = q.lower()
    search_result = []

    # Block number search
    if query_string.isdigit():
        block = session.exec(select(Blocks.hash, Blocks.number).where(Blocks.number == int(query_string))).first()
        if block:
            return [BlockSearchResult(block_hash=bytes_to_hex_str(block.hash), block_number=block.number)]

    # Wallet/contract address search
    if is_eth_address(query_string):
        address_bytes = hex_str_to_bytes(query_string)

        # Check contract first
        contract_address = session.exec(select(Contracts.address).where(Contracts.address == address_bytes)).first()
        if contract_address:
            return [AddressSearchResult(wallet_address=bytes_to_hex_str(contract_address))]

        # Check from/to addresses.
        # BUG FIX: address columns store raw bytes, so the hex query string must
        # be converted with hex_str_to_bytes before comparison (the previous
        # code compared bytes columns against the str and could never match).
        # The single-column select yields the address value itself, matching
        # how the contract lookup above consumes its result.
        for column in (Transactions.from_address, Transactions.to_address):
            wallet_address = session.exec(select(column).where(column == address_bytes)).first()
            if wallet_address:
                return [AddressSearchResult(wallet_address=bytes_to_hex_str(wallet_address))]

    # Transaction/block hash search
    if is_eth_transaction_hash(query_string):
        hash_bytes = hex_str_to_bytes(query_string)

        # Check transaction
        transaction = session.exec(select(Transactions.hash).where(Transactions.hash == hash_bytes)).first()
        if transaction:
            return [TransactionSearchResult(transaction_hash=bytes_to_hex_str(transaction))]

        # Check block
        block = session.exec(select(Blocks.hash, Blocks.number).where(Blocks.hash == hash_bytes)).first()
        if block:
            return [BlockSearchResult(block_hash=bytes_to_hex_str(block.hash), block_number=block.number)]

    # Token search (substring match on name or symbol, capped at 5 results)
    if len(query_string) > 1:
        tokens = session.exec(
            select(Tokens.name, Tokens.symbol, Tokens.address, Tokens.icon_url)
            .where(or_(Tokens.name.ilike(f"%{query_string}%"), Tokens.symbol.ilike(f"%{query_string}%")))
            .limit(5)
        ).all()

        search_result.extend(
            [
                TokenSearchResult(
                    token_name=token.name,
                    token_symbol=token.symbol,
                    token_address=bytes_to_hex_str(token.address),
                    token_logo_url=token.icon_url,
                )
                for token in tokens
            ]
        )

    return search_result


class BlockListResponse(BaseModel):
    """Paginated list of block summaries."""

    data: List[BlockAbbr]
    total: int
    page: int
    size: int
@router.get("/v1/explorer/blocks", response_model=BlockListResponse)
async def get_blocks(session: ReadSessionDep, page: int = Query(1, gt=0), size: int = Query(25, gt=0)):
    """Get list of blocks with pagination (newest first).

    Args:
        session: Database session
        page: Page number (1-based)
        size: Page size
    """
    latest_block_number = _get_last_block(session, columns="number") or 0
    end_block = latest_block_number - (page - 1) * size

    # ROBUSTNESS: a page past the chain tip previously produced a negative
    # block range; return an empty page instead of querying nonsense.
    if end_block < 0:
        return BlockListResponse(data=[], total=latest_block_number, page=page, size=size)

    start_block = max(0, end_block - size + 1)
    blocks = get_blocks_by_range(session, start_block, end_block)

    return BlockListResponse(data=blocks, total=latest_block_number, page=page, size=size)


@router.get("/v1/explorer/block/{number_or_hash}", response_model=BlockDetails)
async def get_block_detail(
    session: ReadSessionDep, number_or_hash: Union[str, int] = Depends(validate_block_identifier)
):
    """Get detailed information about a specific block.

    Args:
        session: Database session
        number_or_hash: Block number or hash

    Raises:
        HTTPException: 404 when no block matches the identifier.
    """
    block = get_block_by_number_or_hash(session, number_or_hash)

    if not block:
        raise HTTPException(status_code=404, detail="Cannot find block with block number or block hash")

    # Get gas fee token price at the block's timestamp
    block.gas_fee_token_price = "{0:.2f}".format(
        get_token_price(session, settings.token_configuration.gas_fee_token, block.timestamp)
    )

    # Get previous block info.
    # BUG FIX: the original used max(number - 1, 1), which made block 1 compare
    # against itself (reporting 0 seconds). Look up the true predecessor and
    # skip the lookup entirely for the genesis block. Multiple .where() args
    # are ANDed, so operator.and_ (which only works via SQLAlchemy's `&`
    # overload) is no longer needed here.
    earlier_block = None
    if block.number > 0:
        earlier_block = session.exec(
            select(Blocks).where(Blocks.number == block.number - 1, Blocks.reorg == False)
        ).first()

    if earlier_block:
        block.seconds_since_last_block = block.timestamp.timestamp() - earlier_block.timestamp.timestamp()
    else:
        block.seconds_since_last_block = None

    # Check if it's the latest block
    latest_block = _get_last_block(session, columns="number") or 0
    block.is_last_block = latest_block == block.number

    return block
# Models
class CompilerVersionResponse(BaseModel):
    """List of compiler version strings supported by the verification service."""

    compiler_versions: List[str]


class VerifyContractRequest(BaseModel):
    """JSON-body variant of the verification request (form endpoint mirrors these fields)."""

    address: str
    compiler_type: str
    compiler_version: str
    evm_version: Optional[str] = "default"
    proxy: Optional[str] = None
    implementation: Optional[str] = None
    license_type: str = "None"
    optimization: Optional[bool] = None
    optimization_runs: Optional[int] = None
    constructor_arguments: Optional[str] = None
    input_str: Optional[str] = None


class VerifyResponse(BaseModel):
    """Outcome of a verification attempt."""

    message: str
    status: Optional[str] = None
    result: Optional[str] = None


class CheckVerificationResponse(BaseModel):
    """Whether an address is a contract and already verified."""

    message: str
    already_verified: bool


class ProxyVerificationResponse(BaseModel):
    """Result of resolving a proxy contract's implementation."""

    implementation_contract_address: Optional[str] = None
    implementation_address: Optional[str] = None
    message: str
    is_verified: Optional[bool] = None


# Contract Verification Endpoints
@router.post("/v1/explorer/verify_contract/verify", response_model=VerifyResponse)
async def verify_contract(
    session: ReadSessionDep,
    files: List[UploadFile] = File(None),
    address: str = Form(...),
    compiler_type: str = Form(...),
    compiler_version: str = Form(...),
    evm_version: Optional[str] = Form("default"),
    proxy: Optional[str] = Form(None),
    implementation: Optional[str] = Form(None),
    license_type: str = Form("None"),
    optimization: Optional[bool] = Form(None),
    optimization_runs: Optional[int] = Form(None),
    constructor_arguments: Optional[str] = Form(None),
    input_str: Optional[str] = Form(None),
    libraries: Optional[str] = Form(None),
):
    """Verify a smart contract with provided source code and compiler settings.

    Marks the contract verified locally when the remote verification service
    returns HTTP 200.
    """
    validate_input(address, compiler_type, compiler_version)
    contracts = get_contract_by_address(address)
    check_contract_verification_status(contracts)

    creation_code, deployed_code = get_creation_or_deployed_code(contracts)

    payload = {
        "address": address,
        "wallet_address": ZERO_ADDRESS,
        "compiler_type": compiler_type,
        "compiler_version": compiler_version,
        "evm_version": evm_version,
        "license_type": license_type,
        "optimization": optimization,
        "optimization_runs": optimization_runs,
        "input_str": input_str,
        "constructor_arguments": constructor_arguments,
        "proxy": proxy,
        "implementation": implementation,
        "creation_code": creation_code,
        "deployed_code": deployed_code,
    }

    # BUG FIX: the original assigned `libraries = Form(None)` inside the
    # function body. That creates a (truthy) FastAPI Form marker object, so
    # payload["libraries_data"] was always set to the marker instead of the
    # submitted value. `libraries` is now a proper optional form field
    # (a backward-compatible signature addition).
    if compiler_type != "Solidity (Standard-Json-Input)" and libraries:
        payload["libraries_data"] = libraries

    response = await send_sync_verification_request(payload, files)
    if response.status_code == 200:
        contracts.is_verified = True
        session.commit()
        return VerifyResponse(message="Contract verified successfully")

    return VerifyResponse(message=f"Verified contract failed: {response.text}")


@router.get("/v1/explorer/verify_contract/solidity_versions", response_model=CompilerVersionResponse)
async def get_solidity_compiler_versions():
    """Get supported Solidity compiler versions.

    Raises:
        HTTPException: 400 when the upstream service returns nothing.
    """
    response = await get_solidity_version()
    if not response:
        raise HTTPException(status_code=400, detail="Failed to retrieve compiler versions")
    return CompilerVersionResponse(compiler_versions=response.get("compiler_versions"))
@router.get("/v1/explorer/verify_contract/evm_versions")
async def get_evm_version_list():
    """Get supported EVM versions for contract compilation."""
    versions = await get_evm_versions()
    if versions:
        return versions
    raise HTTPException(status_code=400, detail="Failed to retrieve evm versions")


@router.get("/v1/explorer/verify_contract/license_types")
async def get_license_types():
    """Get available license types for smart contracts."""
    licenses = await get_explorer_license_type()
    if licenses:
        return licenses
    raise HTTPException(status_code=400, detail="Failed to retrieve license types")


@router.get("/v1/explorer/verify_contract/vyper_versions", response_model=CompilerVersionResponse)
async def get_vyper_compiler_versions():
    """Get supported Vyper compiler versions."""
    payload = await get_vyper_version()
    if not payload:
        raise HTTPException(status_code=400, detail="Failed to retrieve compiler versions")
    return CompilerVersionResponse(compiler_versions=payload.get("compiler_versions"))


@router.post("/v1/explorer/verify_contract/check", response_model=CheckVerificationResponse)
async def check_contract_verification(session: ReadSessionDep, address: str):
    """Check if a contract is eligible for verification."""
    if not address:
        raise HTTPException(status_code=400, detail="Missing required data")

    normalized = address.lower()
    record = session.exec(select(Contracts).where(Contracts.address == hex_str_to_bytes(normalized))).first()

    # An address with no contract row (or no deployment tx) is not a contract.
    if record is None or not record.transaction_hash:
        raise HTTPException(status_code=400, detail="The address is not a contract")

    if record.is_verified:
        return CheckVerificationResponse(message="This contract already verified", already_verified=True)

    return CheckVerificationResponse(message="This contract can be verified", already_verified=False)


@router.post("/v1/explorer/verify_contract/verify_proxy", response_model=ProxyVerificationResponse)
async def verify_proxy_contract(
    proxy_contract_address: str,
):
    """Verify a proxy contract and get its implementation details."""
    if not proxy_contract_address:
        raise HTTPException(status_code=400, detail="Please sent correct proxy contract address")

    implementation_address = await get_implementation_contract(proxy_contract_address)
    if not implementation_address:
        # No delegatecall pattern detected in the bytecode.
        return ProxyVerificationResponse(
            implementation_address=None,
            message="This contract does not look like it contains any delegatecall opcode sequence.",
        )

    exists = await get_abi_by_chain_id_address(address=implementation_address)
    return ProxyVerificationResponse(
        implementation_contract_address=implementation_address,
        message=f"The {'proxy' if exists else ''} implementation contract at {implementation_address} "
        f"{'is' if exists else 'is not'} verified.",
        is_verified=exists,
    )


@router.post("/v1/explorer/verify_contract/save_proxy")
async def save_proxy_mapping(
    session: ReadSessionDep,
    proxy_contract_address: str,
    implementation_contract_address: str,
):
    """Save the mapping between proxy and implementation contracts."""
    if not proxy_contract_address or not implementation_contract_address:
        raise HTTPException(status_code=400, detail="Not such proxy contract address")

    proxy_row = session.exec(
        select(Contracts).where(Contracts.address == hex_str_to_bytes(proxy_contract_address.lower()))
    ).first()

    if proxy_row is None:
        raise HTTPException(status_code=404, detail="Contract not found")

    proxy_row.verified_implementation_contract = implementation_contract_address.lower()
    session.add(proxy_row)
    session.commit()

    return as_dict(proxy_row)
@router.post("/v1/explorer/command_api/contract")
async def command_contract_verify(
    session: ReadSessionDep,
    action: str = Form(...),
    module: str = Form(...),
    contractaddress: Optional[str] = Form(None),
    codeformat: Optional[str] = Form(None),
    compilerversion: Optional[str] = Form(None),
    optimizationUsed: Optional[str] = Form(None),
    runs: Optional[int] = Form(None),
    sourceCode: Optional[str] = Form(None),
    constructorArguments: Optional[str] = Form(None),
    guid: Optional[str] = Form(None),
    address: Optional[str] = Form(None),
):
    """Handle contract verification through the Etherscan-style command API.

    Non-"verifysourcecode" actions are delegated to the generic command handler;
    verification requests are forwarded to the async verification service.
    """
    if module != "contract":
        return {"message": "The module is error", "status": "0"}

    if action != "verifysourcecode":
        return await command_normal_contract_data(module, action, address, guid)

    # BUG FIX: contractaddress is an optional form field; the original called
    # .lower() on it unconditionally and raised AttributeError when absent.
    if not contractaddress:
        return {"message": "The contractaddress is required", "status": "0"}

    address = contractaddress.lower()
    contracts = get_contract_by_address(address)
    if contracts.is_verified:
        return {"message": "This contract is verified", "status": "0"}

    creation_code, deployed_code = get_creation_or_deployed_code(contracts)
    payload = {
        "address": address,
        "compiler_type": codeformat,
        "compiler_version": compilerversion,
        "evm_version": "default",
        "license_type": "None",
        "optimization": optimizationUsed == "1",
        "optimization_runs": runs or 0,
        "input_str": sourceCode,
        "constructor_arguments": constructorArguments,
        "creation_code": creation_code,
        "deployed_code": deployed_code,
    }

    response = await send_async_verification_request(payload)
    if response.status_code == 202:
        # NOTE(review): 202 means the verification job was *accepted*, not
        # completed — marking is_verified here may be premature; confirm with
        # the verification service's contract.
        contracts.is_verified = True
        session.commit()
        return {"message": "Contract successfully verified", "result": response.json()["guid"], "status": "1"}
    return {"message": response.text, "status": "0"}


@router.get("/v1/explorer/contract/{contract_address}/code")
async def get_contract_code(session: ReadSessionDep, contract_address: str):
    """Get verified contract source code and related files.

    Raises:
        HTTPException: 400 when the contract is missing, unverified, or has no code.
    """
    contract_address = contract_address.lower()
    contract = session.get(Contracts, hex_str_to_bytes(contract_address))

    if not contract or not contract.is_verified:
        raise HTTPException(status_code=400, detail="Contract not exist or contract is not verified.")

    contracts_verification = await get_contract_code_by_address(address=contract_address)
    if not contracts_verification:
        raise HTTPException(status_code=400, detail="Contract code not found!")

    # Map the stored S3 folder paths to downloadable {name, path} entries.
    files = []
    if "folder_path" in contracts_verification:
        files = [
            {"name": file.split("/")[-1], "path": f"https://contract-verify-files.s3.amazonaws.com/{file}"}
            for file in contracts_verification["folder_path"]
        ]
    contracts_verification["files"] = files

    return contracts_verification
def response_csv(data: list[dict], filename: str, header: list[str]) -> Response:
    """Build a text/csv attachment response from a list of row dicts.

    Args:
        data: Rows, each a dict keyed by the entries of ``header``.
        filename: Download filename (without the ``.csv`` extension).
        header: Ordered CSV column names; also the DictWriter field names.

    Returns:
        A FastAPI Response with Content-Disposition set for download.
    """
    si = io.StringIO()
    cw = csv.DictWriter(si, fieldnames=header)

    if header:
        cw.writeheader()

    cw.writerows(data)
    csv_content = si.getvalue()

    response = Response(content=csv_content, media_type="text/csv")
    # BUG FIX: the filename parameter was unused and the header carried a
    # literal placeholder; use the caller-supplied name for the download.
    response.headers["Content-Disposition"] = f"attachment; filename={filename}.csv"
    response.headers["Content-Type"] = "text/csv; charset=utf-8"
    return response


router = APIRouter(tags=["EXPORT"])


def get_block_range(
    session: ReadSessionDep,
    filtertype: Optional[str] = Query(
        None,
        regex="^(date|block)$",
        description="Query type: 'date' to filter by date range, 'block' to filter by block numbers",
    ),
    startblock: int = Query(0, ge=0, description="Start block number (used when filtertype is 'block')"),
    endblock: int = Query(4999, ge=0, description="End block number (used when filtertype is 'block')"),
    startdate: Optional[date] = Query(
        None, description="Start date in YYYY-MM-DD format (used when filtertype is 'date')"
    ),
    enddate: Optional[date] = Query(None, description="End date in YYYY-MM-DD format (used when filtertype is 'date')"),
) -> Tuple[int, int]:
    """
    Determine the block range. If filtertype=="date", convert the provided startdate and enddate
    into a block range using the Blocks table; otherwise, use the provided startblock and endblock.

    Raises:
        HTTPException: 400 when filtertype is 'date' but either date is missing.
    """
    if filtertype == "date":
        if not startdate or not enddate:
            raise HTTPException(
                status_code=400, detail="Start date and end date must be provided when using date filter"
            )
        start_timestamp = datetime.combine(startdate, time.min)
        end_timestamp = datetime.combine(enddate, time.max)
        # First block at/after the start of the window, last block at/before its end.
        start_block_obj = session.exec(
            select(Blocks).where(Blocks.timestamp >= start_timestamp).order_by(Blocks.timestamp.asc()).limit(1)
        ).first()
        end_block_obj = session.exec(
            select(Blocks).where(Blocks.timestamp <= end_timestamp).order_by(Blocks.timestamp.desc()).limit(1)
        ).first()
        # NOTE(review): if neither bound matches, this yields (0, 0) — callers
        # then export block 0 only; confirm that is the intended fallback.
        start_block_number = start_block_obj.number if start_block_obj else 0
        end_block_number = end_block_obj.number if end_block_obj else 0
        return start_block_number, end_block_number
    else:
        return startblock, endblock
@router.get("/v1/explorer/export/internal_transactions/{address}")
async def export_internal_transactions(
    session: ReadSessionDep,
    address: str = Depends(external_api_validate_address),
    block_range: Tuple[int, int] = Depends(get_block_range),
):
    """Export up to 5000 internal transactions touching ``address`` as a CSV download."""
    if not address:
        raise HTTPException(status_code=400, detail="Invalid wallet address")
    start_block_number, end_block_number = block_range

    stmt = (
        select(ContractInternalTransactions)
        .where(
            and_(
                ContractInternalTransactions.block_number >= start_block_number,
                ContractInternalTransactions.block_number <= end_block_number,
                or_(
                    ContractInternalTransactions.from_address == hex_str_to_bytes(address),
                    ContractInternalTransactions.to_address == hex_str_to_bytes(address),
                ),
            )
        )
        .order_by(ContractInternalTransactions.block_number.asc(), ContractInternalTransactions.transaction_index.asc())
        .limit(5000)
    )
    internal_transactions: List[ContractInternalTransactions] = session.exec(stmt).all()

    header = [
        "blockNumber",
        "timeStamp",
        "hash",
        "from",
        "to",
        "value",
        "contractAddress",
        "type",
        "gas",
        "traceId",
        "isError",
        "errCode",
    ]
    result = [
        {
            "blockNumber": str(tx.block_number),
            "timeStamp": tx.block_timestamp.strftime("%s"),
            # BUG FIX: hash/from/to are stored as raw bytes; convert to hex
            # strings like the plain-transaction export does, instead of
            # writing b'...' reprs into the CSV.
            "hash": bytes_to_hex_str(tx.transaction_hash),
            "from": bytes_to_hex_str(tx.from_address),
            "to": bytes_to_hex_str(tx.to_address),
            "value": str(tx.value),
            "contractAddress": (
                bytes_to_hex_str(tx.to_address) if tx.trace_type in ["create", "create2"] else ""
            ),
            "type": tx.trace_type,
            "gas": str(tx.gas),
            "traceId": tx.trace_id,
            # BUG FIX: the original emitted "1" only when error == 0 (i.e. when
            # there was *no* error). Flag as error exactly when an error value
            # is present. Assumes a falsy tx.error means success — TODO confirm
            # against the traces model.
            "isError": "1" if tx.error else "0",
            "errCode": str(tx.error) if tx.error else "",
        }
        for tx in internal_transactions
    ]
    filename = f"internal_transactions-{address}-{datetime.now().strftime('%Y%m%d%H%M%S')}"
    return response_csv(result, filename, header)
def token_holder_list(session: Session, contract_address: str, token_type: str) -> List[dict]:
    """Return up to 10000 holders of the given token with their balances.

    Args:
        session: Database session.
        contract_address: Token contract address (hex string).
        token_type: Token standard label (currently informational only).

    Returns:
        Rows of ``{"TokenHolderAddress", "TokenHolderQuantity"}``; empty when
        the token is unknown.
    """
    token_address = hex_str_to_bytes(contract_address)

    # BUG FIX: Tokens.address stores bytes; the original compared it against
    # the hex string and therefore never found the token.
    token = session.exec(select(Tokens).where(Tokens.address == token_address)).first()
    if token is None:
        return []

    # BUG FIX: the original filtered CurrentTokenBalances.address (the holder
    # column) against the contract address, so at most the contract's own row
    # matched. Filter on the token column instead so all holders are returned.
    # Assumes CurrentTokenBalances has a token_address column — TODO confirm
    # against the model definition.
    stmt = (
        select(CurrentTokenBalances.address, func.sum(CurrentTokenBalances.balance).label("balance"))
        .where(and_(CurrentTokenBalances.token_address == token_address, CurrentTokenBalances.balance > 0))
        .group_by(CurrentTokenBalances.address)
        .order_by(func.sum(CurrentTokenBalances.balance).desc())
        .limit(10000)
    )
    holders = session.exec(stmt).all()
    # Holder addresses are bytes; render them as hex for the CSV.
    return [
        {"TokenHolderAddress": bytes_to_hex_str(holder[0]), "TokenHolderQuantity": str(holder[1])}
        for holder in holders
    ]


@router.get("/v1/explorer/export/token_transfers")
async def export_erc20_token_transfers(
    session: ReadSessionDep,
    block_range: Tuple[int, int] = Depends(get_block_range),
    contractaddress: Optional[str] = Query(None, description="Contract address"),
    address: Optional[str] = Query(None, description="Wallet address"),
):
    """Export ERC-20 token transfers for a wallet and/or token contract as CSV."""
    start_block_number, end_block_number = block_range
    header = [
        "blockNumber",
        "timeStamp",
        "hash",
        "nonce",
        "blockHash",
        "contractAddress",
        "from",
        "to",
        "tokenName",
        "tokenSymbol",
        "transactionIndex",
        "gas",
        "gasPrice",
        "gasUsed",
        "cumulativeGasUsed",
        "value",
        "tokenDecimal",
    ]
    result = token_transfers(session, contractaddress, address, start_block_number, end_block_number, "ERC20")
    filename = f"erc20_token_transfers-{datetime.now().strftime('%Y%m%d%H%M%S')}"
    return response_csv(result, filename, header)
@router.get("/v1/explorer/export/nft1155_token_transfers")
async def export_erc1155_token_transfers(
    session: ReadSessionDep,
    block_range: Tuple[int, int] = Depends(get_block_range),
    contractaddress: Optional[str] = Query(None, description="Contract address"),
    address: Optional[str] = Query(None, description="Wallet address"),
):
    """Export ERC-1155 token transfers for a wallet and/or token contract as CSV."""
    start_block_number, end_block_number = block_range
    header = [
        "blockNumber",
        "timeStamp",
        "hash",
        "nonce",
        "blockHash",
        "contractAddress",
        "from",
        "to",
        "tokenName",
        "tokenSymbol",
        "transactionIndex",
        "gas",
        "gasPrice",
        "gasUsed",
        "cumulativeGasUsed",
        "tokenValue",
        "tokenID",
    ]
    result = token_transfers(session, contractaddress, address, start_block_number, end_block_number, "ERC1155")
    filename = f"erc1155_token_transfers-{datetime.now().strftime('%Y%m%d%H%M%S')}"
    return response_csv(result, filename, header)


@router.get("/v1/explorer/export/token_holders/{contract_address}")
async def export_erc20_token_holders(
    session: ReadSessionDep, contract_address: str = Path(..., description="Contract address")
):
    """Export ERC-20 token holders and balances as CSV."""
    header = ["TokenHolderAddress", "TokenHolderQuantity"]
    result = token_holder_list(session, contract_address, "ERC20")
    filename = f"erc20_token_holders-{datetime.now().strftime('%Y%m%d%H%M%S')}"
    return response_csv(result, filename, header)


@router.get("/v1/explorer/export/nft_token_holders/{contract_address}")
async def export_erc721_token_holders(
    session: ReadSessionDep, contract_address: str = Path(..., description="Contract address")
):
    """Export ERC-721 token holders and balances as CSV."""
    header = ["TokenHolderAddress", "TokenHolderQuantity"]
    result = token_holder_list(session, contract_address, "ERC721")
    filename = f"erc721_token_holders-{datetime.now().strftime('%Y%m%d%H%M%S')}"
    return response_csv(result, filename, header)


@router.get("/v1/explorer/export/nft1155_token_holders/{contract_address}")
async def export_erc1155_token_holders(
    session: ReadSessionDep, contract_address: str = Path(..., description="Contract address")
):
    """Export ERC-1155 token holders and balances as CSV."""
    header = ["TokenHolderAddress", "TokenHolderQuantity"]
    result = token_holder_list(session, contract_address, "ERC1155")
    filename = f"erc1155_token_holders-{datetime.now().strftime('%Y%m%d%H%M%S')}"
    return response_csv(result, filename, header)


class ContractStatisticsType(str, Enum):
    """Supported contract-ranking metrics."""

    TRANSACTIONS_RECEIVED = "transactions_received"


class AddressStatisticsType(str, Enum):
    """Supported address-ranking metrics."""

    GAS_USED = "gas_used"
    TRANSACTIONS_SENT = "transactions_sent"


class RankResponse(BaseModel):
    """One row of a contract/address ranking.

    BUG FIX: under pydantic v2 an ``Optional[...]`` annotation without a
    default is a *required* field, so the contract-ranks endpoint (which omits
    gas_used and ens_name) would raise ValidationError. Explicit ``None``
    defaults make the optional fields genuinely optional.
    """

    address: str
    transaction_count: Optional[int] = None
    gas_used: Optional[float] = None
    tag: Optional[str] = None
    ens_name: Optional[str] = None


class RankListResponse(BaseModel):
    """Wrapper for a list of ranking rows."""

    data: List[RankResponse]
@router.get("/v1/explorer/statistics/contract/ranks", response_model=RankListResponse)
async def get_contract_ranks(
    session: ReadSessionDep, statistics: ContractStatisticsType, limit: int = Query(10, le=100)
):
    """Get contract statistics rankings over the last 24 hours.

    Args:
        session: Database session
        statistics: Type of statistics to fetch (currently only transactions_received)
        limit: Number of results to return (max 100)
    """
    # Subquery of all known contract addresses.
    contract_addresses = select(Contracts.address)
    one_day_ago = datetime.now() - timedelta(days=1)

    # ROBUSTNESS: ContractStatisticsType has a single member today; the
    # original bound `query` only inside an `if`, which would raise
    # UnboundLocalError if the enum ever grows without this branch being
    # extended. Build the query unconditionally instead.
    query = (
        select(
            Transactions.to_address.label("address"), func.count().label("transaction_count"), AddressIndexStats.tag
        )
        .join(AddressIndexStats, Transactions.to_address == AddressIndexStats.address, isouter=True)
        .where(Transactions.block_timestamp > one_day_ago, Transactions.to_address.in_(contract_addresses))
        .group_by(Transactions.to_address, AddressIndexStats.tag)
        .order_by(func.count().desc())
        .limit(limit)
    )

    results = session.exec(query).all()

    return {
        "data": [
            RankResponse(
                address=bytes_to_hex_str(result.address),
                transaction_count=result.transaction_count,
                # Not applicable to this ranking; passed explicitly so the
                # model validates even when its optional fields lack defaults.
                gas_used=None,
                tag=result.tag,
                ens_name=None,
            )
            for result in results
        ]
    }


@router.get("/v1/explorer/statistics/address/ranks", response_model=RankListResponse)
async def get_address_ranks(session: ReadSessionDep, statistics: AddressStatisticsType, limit: int = Query(10, le=100)):
    """Get address statistics rankings (gas used or transactions sent) over the last 24 hours."""
    one_day_ago = datetime.now() - timedelta(days=1)

    if statistics == AddressStatisticsType.GAS_USED:
        query = (
            select(
                Transactions.from_address.label("address"),
                func.sum(Transactions.receipt_gas_used).label("gas_used"),
                AddressIndexStats.tag,
            )
            .join(AddressIndexStats, Transactions.from_address == AddressIndexStats.address, isouter=True)
            .where(Transactions.block_timestamp > one_day_ago)
            .group_by(Transactions.from_address, AddressIndexStats.tag)
            .order_by(func.sum(Transactions.receipt_gas_used).desc())
            .limit(limit)
        )
    else:  # TRANSACTIONS_SENT
        query = (
            select(
                Transactions.from_address.label("address"),
                func.count().label("transaction_count"),
                AddressIndexStats.tag,
            )
            .join(AddressIndexStats, Transactions.from_address == AddressIndexStats.address, isouter=True)
            .where(Transactions.block_timestamp > one_day_ago)
            .group_by(Transactions.from_address, AddressIndexStats.tag)
            .order_by(func.count().desc())
            .limit(limit)
        )

    results = session.exec(query).all()

    # Get ENS names for addresses.
    # NOTE(review): get_ens_mapping does not appear in this module's visible
    # import block — confirm it is imported, otherwise this raises NameError.
    addresses = [bytes_to_hex_str(r.address) for r in results]
    ens_mapping = get_ens_mapping(addresses)

    return {
        "data": [
            RankResponse(
                address=bytes_to_hex_str(result.address),
                transaction_count=getattr(result, "transaction_count", None),
                gas_used=getattr(result, "gas_used", None),
                tag=result.tag,
                ens_name=ens_mapping.get(bytes_to_hex_str(result.address)),
            )
            for result in results
        ]
    }
hemera.app.api.routes.parameters.validate_address import is_eth_address +from hemera.app.core.config import settings +from hemera.app.models import SortOrder +from hemera.common.enumeration.token_type import TokenType +from hemera.common.models.token.token_balances import CurrentTokenBalances +from hemera.common.models.token.tokens import Tokens +from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes +from hemera.common.utils.web3_utils import to_checksum_address + +router = APIRouter(tags=["tokens"]) + + +class TokenHolderInfo(BaseModel): + token_address: str + wallet_address: str + balance: str + + +class TokenHoldersResponse(BaseModel): + data: List[TokenHolderInfo] + total: int + + +class TokenProfileResponse(BaseModel): + token_name: str + token_checksum_address: str + token_address: str + token_symbol: str + token_logo_url: Optional[str] + token_urls: Optional[dict] + social_medias: Optional[dict] + token_description: Optional[str] + total_supply: str + total_holders: Optional[int] + total_transfers: Optional[int] + token_type: str + type: str + # ERC20 specific fields + token_price: Optional[float] + token_previous_price: Optional[float] + decimals: Optional[float] + token_market_cap: Optional[float] + token_on_chain_market_cap: Optional[float] + previous_price: Optional[float] + gecko_url: Optional[str] + cmc_url: Optional[str] + + +class TokenSortField(str, Enum): + MARKET_CAP = "market_cap" + VOLUME_24H = "volume_24h" + HOLDER_COUNT = "holder_count" + PRICE = "price" + ON_CHAIN_MARKET_CAP = "on_chain_market_cap" + TRANSFER_COUNT = "transfer_count" + + +class TokenResponse(BaseModel): + address: str + name: str + symbol: str + logo: Optional[str] = None + description: Optional[str] = None + total_supply: Optional[float] = None + volume_24h: Optional[float] = None + market_cap: Optional[float] = None + on_chain_market_cap: Optional[float] = None + holder_count: Optional[int] = None + transfer_count: Optional[int] = None + price: 
Optional[float] = None + + +class TokenListResponse(BaseModel): + page: int + size: int + total: int + data: List[TokenResponse] + + +class TokenTransferItem(TokenTransferAbbr): + token_info: Optional[TokenInfo] = None + display_value: Optional[str] + from_addr: Optional[Address] + to_addr: Optional[Address] + + +class TokenTransferResponse(BaseModel): + total: int + data: List[TokenTransferItem] + + +# API Endpoints +@router.get("/v1/explorer/tokens", response_model=TokenListResponse) +async def api_get_tokens( + session: ReadSessionDep, + page: int = Query(1, gt=0), + size: int = Query(25, gt=0), + type: TokenType = Query(...), + sort: Optional[TokenSortField] = None, + order: SortOrder = Query(SortOrder.DESC), + is_verified: bool = Query(False), +): + """Get list of tokens with pagination and sorting.""" + # Set default sort field + if not sort: + sort = TokenSortField.MARKET_CAP if type == TokenType.ERC20 else TokenSortField.HOLDER_COUNT + + # Validate sort field based on token type + valid_sorts = ( + [ + TokenSortField.MARKET_CAP, + TokenSortField.VOLUME_24H, + TokenSortField.HOLDER_COUNT, + TokenSortField.PRICE, + TokenSortField.ON_CHAIN_MARKET_CAP, + ] + if type == TokenType.ERC20 + else [TokenSortField.HOLDER_COUNT, TokenSortField.TRANSFER_COUNT] + ) + + if sort not in valid_sorts: + raise HTTPException(status_code=400, detail="Invalid sort field for token type") + + # Build query with filters + query = select(Tokens) + filter_conditions = [Tokens.token_type == type.value.upper()] + if is_verified: + filter_conditions.append(Tokens.is_verified == True) + query = query.where(and_(*filter_conditions)) + + # Apply sorting and pagination + sort_column = getattr(Tokens, sort.value) + query = query.order_by(nullslast(sort_column.desc() if order == SortOrder.DESC else sort_column.asc())) + total = session.exec(select([func.count()]).select_from(query.subquery())).one() + query = query.offset((page - 1) * size).limit(size) + + tokens = session.exec(query).all() + 
+ # Format response based on token type + if type == TokenType.ERC20: + token_list = [ + TokenResponse( + address=bytes_to_hex_str(token.address), + name=token.name, + symbol=token.symbol, + logo=token.icon_url, + description=token.description, + total_supply=float(token.total_supply) * 10 ** (-int(token.decimals)) if token.total_supply else None, + volume_24h=round(token.volume_24h, 2) if token.volume_24h else None, + market_cap=round(token.market_cap, 2) if token.market_cap else None, + on_chain_market_cap=round(token.on_chain_market_cap, 2) if token.on_chain_market_cap else None, + holder_count=token.holder_count, + price=round(token.price, 4) if token.price else None, + ) + for token in tokens + ] + else: + token_list = [ + TokenResponse( + address=bytes_to_hex_str(token.address), + name=token.name, + symbol=token.symbol, + total_supply=int(token.total_supply) if token.total_supply else None, + holder_count=token.holder_count, + transfer_count=token.transfer_count, + ) + for token in tokens + ] + + return {"page": page, "size": size, "total": total, "data": token_list} + + +@router.get("/v1/explorer/token/{address}/profile", response_model=TokenProfileResponse) +async def api_get_token_profile(session: ReadSessionDep, address: str): + """Get detailed profile information for a token.""" + token = session.exec(select(Tokens).where(Tokens.address == hex_str_to_bytes(address.lower()))).first() + if not token: + raise HTTPException(status_code=404, detail="Token not found") + + # Base token info + profile = { + "token_name": token.name, + "token_checksum_address": to_checksum_address(token.address), + "token_address": bytes_to_hex_str(token.address), + "token_symbol": token.symbol, + "token_logo_url": token.icon_url, + "token_urls": token.urls, + "social_medias": token.urls, + "token_description": token.description, + "total_supply": f"{float(token.total_supply or 0):.0f}", + "total_holders": token.holder_count, + "total_transfers": 
get_token_transfer_count(token.token_type, address), + "token_type": token.token_type, + "type": token_type_convert(token.token_type), + } + + # Add ERC20-specific info + if token.token_type == "ERC20": + profile.update( + { + "token_price": token.price, + "token_previous_price": token.previous_price, + "decimals": float(token.decimals), + "total_supply": format_token_supply(token.total_supply, token.decimals), + "token_market_cap": token.market_cap, + "token_on_chain_market_cap": token.on_chain_market_cap, + "previous_price": token.previous_price, + "gecko_url": f"https://www.coingecko.com/en/coins/{token.gecko_id}" if token.gecko_id else None, + "cmc_url": f"https://coinmarketcap.com/currencies/{token.cmc_slug}/" if token.cmc_slug else None, + } + ) + + return profile + + +@router.get("/v1/explorer/token/{token_address}/top_holders", response_model=TokenHoldersResponse) +async def api_get_token_top_holders( + session: ReadSessionDep, token_address: str, page: int = Query(1, gt=0), size: int = Query(settings.PAGE_SIZE, gt=0) +): + """Get top holders for a specific token.""" + if page <= 0 or size <= 0: + raise HTTPException(status_code=400, detail="Invalid page or size") + + token = session.exec(select(Tokens).where(Tokens.address == hex_str_to_bytes(token_address.lower()))).first() + if not token: + raise HTTPException(status_code=404, detail="Token not found") + + # Get holders with pagination + token_address_bytes = hex_str_to_bytes(token_address.lower()) + query = ( + select(CurrentTokenBalances) + .where(CurrentTokenBalances.token_address == token_address_bytes) + .order_by(desc(CurrentTokenBalances.balance)) + .offset((page - 1) * size) + .limit(size) + ) + + holders = session.exec(query).all() + holder_list = [ + { + "token_address": token_address.lower(), + "wallet_address": bytes_to_hex_str(holder.address), + "balance": format_token_balance(holder.balance, token.decimals if token.token_type == "ERC20" else 0), + } + for holder in holders + ] + + 
total_count = session.exec( + select([func.count()]).where(CurrentTokenBalances.token_address == token_address_bytes) + ).one() + + return {"data": holder_list, "total": total_count} + + +class TokenTransfersFilterParams: + def __init__( + self, + token_address: Optional[str] = Query( + None, description="Token contract address for filtering transfers by a specific token" + ), + type: TokenType = Query(TokenType.ERC20, description="Token type, e.g., ERC20, ERC721, or ERC1155"), + ): + self.token_address = token_address + self.type = type + self._validate_filters() + + def _validate_filters(self): + if self.token_address and not is_eth_address(self.token_address): + raise HTTPException(status_code=400, detail="Invalid token address") + + +@router.get("/v1/explorer/token_transfers", response_model=TokenTransferResponse) +async def api_get_token_transfers( + session: ReadSessionDep, + enricher: BlockchainEnricherDep, + page: int = Query(1, gt=0), + size: int = Query(settings.PAGE_SIZE, gt=0), + filters: TokenTransfersFilterParams = Depends(), +): + if filters.token_address: + token_transfers = get_token_transfers_by_token_address( + session, filters.token_address, token_type=filters.type.value, limit=size, offset=(page - 1) * size + ) + else: + token_transfers = get_token_transfers( + session, None, token_type=filters.type.value, limit=size, offset=(page - 1) * size + ) + + enriched_token_transfers = enricher.enrich( + [token_transfer.dict() for token_transfer in token_transfers], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.TOKEN_INFO: {"token_address": "token_info"}, + }, + session, + ) + + return TokenTransferResponse( + total=len(enriched_token_transfers), data=[TokenTransferItem(**item) for item in enriched_token_transfers] + ) diff --git a/hemera/app/api/routes/explorer/transaction.py b/hemera/app/api/routes/explorer/transaction.py new file mode 100644 index 000000000..4f6741e6c --- /dev/null +++ 
b/hemera/app/api/routes/explorer/transaction.py @@ -0,0 +1,521 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/20 15:01 +# @Author ideal93 +# @File transaction.py +# @Brief +from datetime import datetime, timedelta +from typing import Annotated, Any, List, Optional, Tuple + +from fastapi import APIRouter, Depends, HTTPException, Path, Query +from pydantic import BaseModel +from sqlmodel import and_, desc, func, select + +from hemera.app.api.deps import ReadSessionDep +from hemera.app.api.routes.enricher import BlockchainEnricherDep +from hemera.app.api.routes.enricher.address_enricher import Address, EnricherType +from hemera.app.api.routes.explorer.token import TokenTransferItem, TokenTransferResponse +from hemera.app.api.routes.helper.internal_transaction import ( + InternalTransactionAbbr, + get_internal_transactions, + get_internal_transactions_by_address, + get_internal_transactions_by_block_number, + get_internal_transactions_by_hash, + get_internal_transactions_count, + get_internal_transactions_count_by_address, + get_internal_transactions_count_by_block_number, +) +from hemera.app.api.routes.helper.log import LogItem, get_logs_by_hash +from hemera.app.api.routes.helper.token_transfers import get_token_transfers_by_hash +from hemera.app.api.routes.helper.transaction import ( + TransactionAbbr, + TransactionDetail, + get_transactions_and_total_count_by_condition, + get_transactions_by_address, + get_transactions_count_by_address, +) +from hemera.app.core.config import settings +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.transactions import Transactions +from hemera.common.utils.format_utils import hex_str_to_bytes +from hemera.common.utils.web3_utils import valid_hash + +router = APIRouter(tags=["TRANSACTION"]) + + +class InternalTransactionItem(InternalTransactionAbbr): + from_addr: Address + to_addr: Address + display_value: str + + +class InternalTransactionResponse(BaseModel): + data: 
List[InternalTransactionItem] + total: int + max_display: Optional[int] = None + page: Optional[int] = None + size: Optional[int] = None + + +class TransactionItem(TransactionAbbr): + from_addr: Address + to_addr: Address + display_value: str + value_usd: Optional[str] + transaction_fee_usd: Optional[str] + + +class TransactionResponse(BaseModel): + data: List[TransactionItem] + total: int + max_display: int + page: int + size: int + + +class LogResponse(BaseModel): + total: int + data: List[LogItem] + + +class TraceItem(BaseModel): + from_address: str + to_address: str + value: Optional[str] + input: Optional[str] + output: Optional[str] + trace_type: str + call_type: Optional[str] + gas: Optional[int] + gas_used: Optional[int] + error: Optional[str] + status: Optional[int] + function_name: Optional[str] + function_input: Optional[List[dict]] + function_output: Optional[List[dict]] + calls: Optional[List["TraceItem"]] + + from_addr: Address + to_addr: Address + + +class TransactionTracesResponse(BaseModel): + data: TraceItem + + +class TransactionTraceJsonResponse(BaseModel): + data: dict[str, Any] + + +@router.get("/v1/explorer/internal_transactions", response_model=InternalTransactionResponse) +async def api_get_internal_transactions( + session: ReadSessionDep, + enricher: BlockchainEnricherDep, + page: int = Query(1, gt=0), + size: int = Query(25, gt=0), + address: Optional[str] = None, + block: Optional[int] = None, +): + if page * size > settings.MAX_INTERNAL_TRANSACTION: + raise HTTPException( + status_code=400, + detail=f"Showing the last {settings.MAX_INTERNAL_TRANSACTION} records only", + ) + + offset = (page - 1) * size + + if address: + total_count = get_internal_transactions_count_by_address(session, address, block) + transactions = get_internal_transactions_by_address(session, address, block, limit=size, offset=offset) + elif block: + total_count = get_internal_transactions_count_by_block_number(session, block) + transactions = 
get_internal_transactions_by_block_number(session, block, limit=size, offset=offset) + else: + total_count = get_internal_transactions_count(session) + transactions = get_internal_transactions(session, limit=size, offset=offset) + + enriched_transactions = enricher.enrich( + [transaction.dict() for transaction in transactions], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.COIN_VALUE: {"value": "display_value"}, + }, + session, + ) + data = [InternalTransactionItem(**tx) for tx in enriched_transactions] + + return InternalTransactionResponse( + data=data, + total=total_count, + max_display=min(total_count, settings.MAX_INTERNAL_TRANSACTION), + page=page, + size=size, + ) + + +class TransactionFilterParams: + def __init__( + self, + block: Optional[str] = Query(None, description="Block number or hash"), + address: Optional[str] = Query(None, description="Filter by address"), + date: Optional[str] = Query(None, description="Filter by date (format: YYYYMMDD)"), + ): + self.block = block + self.address = address + self.date = date + self._validate_filters() + + def _validate_filters(self): + filter_count = sum(x is not None for x in [self.block, self.address, self.date]) + + if filter_count > 1: + raise HTTPException(status_code=400, detail="Only one filter can be applied: block, address, or date") + + if self.date: + try: + datetime.strptime(self.date, "%Y%m%d") + except ValueError: + raise HTTPException(status_code=400, detail="Invalid date format") + + if self.block: + if not self.block.isnumeric() and not valid_hash(self.block): + raise HTTPException(status_code=400, detail="Invalid block identifier") + + +@router.get("/v1/explorer/transactions", response_model=TransactionResponse) +async def get_transactions( + session: ReadSessionDep, + enricher: BlockchainEnricherDep, + page: int = Query(1, gt=0), + size: int = Query(25, gt=0), + filters: TransactionFilterParams = Depends(), +): + """Get transactions list with 
various filters and pagination. + + Args: + session: Database session + page: Page number, starting from 1 + size: Items per page + filters: Transaction filters (block, address, or date) + + Returns: + TransactionResponse: Paginated transaction list with metadata + + Raises: + HTTPException: If page*size exceeds limits or invalid parameters + """ + # Check pagination limits + max_limit = ( + settings.MAX_TRANSACTION_WITH_CONDITION + if any([filters.block, filters.address, filters.date]) + else settings.MAX_TRANSACTION + ) + if page * size > max_limit: + raise HTTPException(status_code=400, detail=f"Showing the last {max_limit} records only") + + if filters.block: + total_records, transactions = _get_transactions_by_block(session, filters.block, page, size) + elif filters.address: + total_records, transactions = _get_transactions_by_address(session, filters.address, page, size) + elif filters.date: + total_records, transactions = _get_transactions_by_date(session, filters.date, page, size) + else: + # Get all transactions with pagination + transaction_list = session.exec( + select(Transactions) + .order_by(desc(Transactions.block_number), desc(Transactions.transaction_index)) + .offset((page - 1) * size) + .limit(size) + ).all() + + total_records = _calculate_total_records(session, transaction_list, page, size) + transactions = [TransactionAbbr.from_db_model(tx) for tx in transaction_list] + + enriched_transactions = enricher.enrich( + [transaction.dict() for transaction in transactions], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.COIN_VALUE: {"value": "display_value"}, + EnricherType.COIN_PRICE: {"transaction_fee": "transaction_fee_usd", "display_value": "value_usd"}, + }, + session, + ) + + return TransactionResponse( + data=[TransactionItem(**tx) for tx in enriched_transactions], + total=total_records, + max_display=min(max_limit, total_records), + page=page, + size=size, + ) + + +def 
_get_transactions_by_block( + session: ReadSessionDep, block: str, page: int, size: int +) -> Tuple[int, List[TransactionAbbr]]: + """Get transactions by block number or hash + + Args: + session: Database session + block: Block number or hash + page: Page number + size: Items per page + + Returns: + Tuple[int, List[TransactionAbbr]]: Total count and list of transactions + """ + if block.isnumeric(): + # Query by block number + block_number = int(block) + chain_block = session.exec(select(Blocks).where(Blocks.number == block_number)).first() + if not chain_block: + raise HTTPException(status_code=400, detail="Block not exist") + + transactions = session.exec( + select(Transactions) + .where(Transactions.block_number == block_number) + .order_by(Transactions.block_number.desc()) + .offset((page - 1) * size) + .limit(size) + ).all() + + return chain_block.transactions_count, [TransactionAbbr.from_db_model(tx) for tx in transactions] + else: + # Query by block hash + block_hash = hex_str_to_bytes(block) + chain_block = session.exec(select(Blocks).where(Blocks.hash == block_hash)).first() + if not chain_block: + raise HTTPException(status_code=400, detail="Block not exist") + + transactions = session.exec( + select(Transactions) + .where(Transactions.block_hash == block_hash) + .order_by(Transactions.block_number.desc()) + .offset((page - 1) * size) + .limit(size) + ).all() + + return chain_block.transactions_count, [TransactionAbbr.from_db_model(tx) for tx in transactions] + + +def _get_transactions_by_address( + session: ReadSessionDep, address: str, page: int, size: int +) -> Tuple[int, List[TransactionAbbr]]: + """Get transactions by address + + Args: + session: Database session + address: Address to filter by + page: Page number + size: Items per page + + Returns: + Tuple[int, List[TransactionAbbr]]: Total count and list of transactions + """ + return ( + get_transactions_count_by_address(session, address, use_address_index=True), + 
get_transactions_by_address(session, address, use_address_index=True, limit=size, offset=(page - 1) * size), + ) + + +def _get_transactions_by_date( + session: ReadSessionDep, date: str, page: int, size: int +) -> Tuple[int, List[Transactions]]: + """Get transactions by date + + Args: + session: Database session + date: Date in YYYYMMDD format + page: Page number + size: Items per page + + Returns: + Tuple[int, List[Transactions]]: Total count and list of transactions + """ + date_obj = datetime.strptime(date, "%Y%m%d") + start_time = date_obj + end_time = start_time + timedelta(days=1) + + date_condition = and_(Transactions.block_timestamp >= start_time, Transactions.block_timestamp < end_time) + transactions, total_count = get_transactions_and_total_count_by_condition( + session, + filter_condition=date_condition, + limit=size, + offset=(page - 1) * size, + ) + return total_count, transactions + + +def _calculate_total_records(session: ReadSessionDep, transactions: List[Transactions], page: int, size: int) -> int: + """Calculate total number of records + + Args: + session: Database session + transactions: List of transactions from current page + page: Current page number + size: Page size + + Returns: + int: Total number of records + """ + if len(transactions) > 0 and len(transactions) < size: + return (page - 1) * size + len(transactions) + return session.exec(select(func.count()).select_from(Transactions)).first() + + +async def verify_transaction_hash( + tx_hash: Annotated[ + str, + Path( + title="Transaction Hash", + description="Ethereum transaction hash (hex format)", + min_length=66, + max_length=66, + pattern="^0x[0-9a-fA-F]{64}$", + ), + ] +) -> str: + """ + Dependency for validating transaction hashes. 
+ + Args: + tx_hash: Transaction hash to validate + + Returns: + str: Validated and formatted transaction hash + + Raises: + HTTPException: If hash format is invalid + """ + tx_hash = valid_hash(tx_hash) + if not tx_hash: + raise HTTPException(status_code=422, detail="Invalid transaction hash format") + return tx_hash + + +TransactionHashDep = Annotated[str, Depends(verify_transaction_hash)] + + +@router.get("/v1/explorer/transaction/{tx_hash}", response_model=TransactionDetail) +async def get_transaction_detail(session: ReadSessionDep, tx_hash: TransactionHashDep): + """Get detailed information about a specific transaction. + + Args: + session: Database session + tx_hash: Transaction hash in hex format + + Raises: + HTTPException: If transaction not found or invalid hash + """ + # Validate and format transaction hash + # Get transaction with basic info + pass + + +@router.get("/v1/explorer/transaction/{tx_hash}/logs", response_model=LogResponse) +async def get_transaction_logs(session: ReadSessionDep, tx_hash: TransactionHashDep): + """Get all logs for a specific transaction. + + Args: + session: Database session + tx_hash: Transaction hash in hex format + + Raises: + HTTPException: If invalid hash format or transaction not found + """ + + logs = get_logs_by_hash(session, tx_hash) + return LogResponse( + logs, + total=len(logs), + ) + + +@router.get("/v1/explorer/transaction/{tx_hash}/token_transfers", response_model=TokenTransferResponse) +async def get_transaction_token_transfers( + session: ReadSessionDep, enricher: BlockchainEnricherDep, tx_hash: TransactionHashDep +): + """Get all token transfers (ERC20, ERC721, ERC1155) for a specific transaction. 
+ + Args: + session: Database session + tx_hash: Transaction hash in hex format + + Raises: + HTTPException: If invalid hash format + """ + # Validate and format hash + token_transfers = get_token_transfers_by_hash(session, tx_hash) + + enriched_token_transfers = enricher.enrich( + [token_transfer.dict() for token_transfer in token_transfers], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.TOKEN_INFO: {"token_address": "token_info"}, + EnricherType.TOKEN_VALUE: {"value": "display_value"}, + }, + session, + ) + + return TokenTransferResponse( + total=len(enriched_token_transfers), data=[TokenTransferItem(**item) for item in enriched_token_transfers] + ) + + +@router.get("/v1/explorer/transaction/{tx_hash}/internal_transactions", response_model=InternalTransactionResponse) +async def get_transaction_internal_transactions(session: ReadSessionDep, enricher: BlockchainEnricherDep, tx_hash: str): + """Get internal transactions for a specific transaction. + + Args: + session: Database session + tx_hash: Transaction hash in hex format + + Raises: + HTTPException: If invalid hash format + """ + internal_transactions = get_internal_transactions_by_hash(session, tx_hash) + + enriched_transactions = enricher.enrich( + [transaction.dict() for transaction in internal_transactions], + { + EnricherType.ADDRESS: {"to_address": "to_addr", "from_address": "from_addr"}, + EnricherType.COIN_VALUE: {"value": "display_value"}, + }, + session, + ) + data = [InternalTransactionItem(**tx) for tx in enriched_transactions] + + return InternalTransactionResponse( + data=data, + total=len(data), + ) + + +@router.get("/v1/explorer/transaction/{tx_hash}/traces", response_model=TransactionTracesResponse) +async def get_transaction_traces(session: ReadSessionDep, enricher: BlockchainEnricherDep, tx_hash: str): + """Get detailed trace information for a transaction. 
+ + Args: + session: Database session + tx_hash: Transaction hash in hex format + + Raises: + HTTPException: If invalid hash format or trace not found + """ + # Validate and format hash + pass + + +@router.get("/v1/explorer/transaction/{tx_hash}/trace_json", response_model=TransactionTraceJsonResponse) +async def get_transaction_traces(session: ReadSessionDep, enricher: BlockchainEnricherDep, tx_hash: str): + """Get detailed trace information for a transaction. + + Args: + session: Database session + tx_hash: Transaction hash in hex format + + Raises: + HTTPException: If invalid hash format or trace not found + """ + # Validate and format hash + pass diff --git a/hemera/app/api/routes/helper/__init__.py b/hemera/app/api/routes/helper/__init__.py new file mode 100644 index 000000000..35f259aeb --- /dev/null +++ b/hemera/app/api/routes/helper/__init__.py @@ -0,0 +1,24 @@ +from typing import Any, List, Literal, Union + +from sqlmodel import select + +ColumnType = Union[Literal["*"], str, List[str]] + + +def process_columns(model_class: Any, columns: ColumnType): + """Helper function to process column input + + Args: + model_class: SQLModel class to select from + columns: Can be "*", single column name, or list of column names + + Returns: + statement: Select statement + """ + if columns == "*": + return select(model_class) + + if isinstance(columns, str): + columns = [col.strip() for col in columns.split(",")] + + return select(*[getattr(model_class, col.strip()) for col in columns]) diff --git a/hemera/app/api/routes/helper/address.py b/hemera/app/api/routes/helper/address.py new file mode 100644 index 000000000..1a8b32c7c --- /dev/null +++ b/hemera/app/api/routes/helper/address.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/22 +# @Author ideal93 +# @File address_utils.py +# @Brief + +from enum import Enum +from typing import Optional, Union + +from sqlmodel import Session, select + +from hemera.app.api.routes.helper import 
ColumnType +from hemera.common.models.address.stats.address_index_stats import AddressIndexStats +from hemera.common.utils.format_utils import hex_str_to_bytes + + +class TokenTransferType(str, Enum): + """Token transfer types for address statistics""" + + ERC20 = "erc20" + ERC721 = "erc721" + ERC1155 = "erc1155" + TOKEN_TXNS = "tokentxns" + TOKEN_TXNS_NFT = "tokentxns-nft" + TOKEN_TXNS_NFT1155 = "tokentxns-nft1155" + + +# Mapping from token type to stats column +TOKEN_TYPE_TO_COLUMN = { + TokenTransferType.TOKEN_TXNS: AddressIndexStats.erc20_transfer_count, + TokenTransferType.TOKEN_TXNS_NFT: AddressIndexStats.nft_721_transfer_count, + TokenTransferType.TOKEN_TXNS_NFT1155: AddressIndexStats.nft_1155_transfer_count, + TokenTransferType.ERC20: AddressIndexStats.erc20_transfer_count, + TokenTransferType.ERC721: AddressIndexStats.nft_721_transfer_count, + TokenTransferType.ERC1155: AddressIndexStats.nft_1155_transfer_count, +} + + +def get_txn_cnt_by_address(session: Session, address: str) -> Optional[int]: + bytes_address = hex_str_to_bytes(address) + statement = select(AddressIndexStats.transaction_count).where(AddressIndexStats.address == bytes_address) + return session.exec(statement).first() + + +def get_token_transfer_count( + session: Session, address: str, token_type: Union[str, TokenTransferType], columns: ColumnType = "*" +) -> Optional[int]: + """Get token transfer count for an address by token type + + Args: + session: SQLModel session + address: Address in hex string format + token_type: Type of token transfers to count (e.g. 
"erc20", "erc721") + columns: Can be "*" for all columns, single column name, or list of column names + + Returns: + Optional[int]: Transfer count or None if not found + + Raises: + ValueError: If address format is invalid or token type is not supported + """ + if isinstance(token_type, str): + token_type = TokenTransferType(token_type.lower()) + + bytes_address = hex_str_to_bytes(address) + statement = select(TOKEN_TYPE_TO_COLUMN[token_type]).where(AddressIndexStats.address == bytes_address) + return session.exec(statement).first() + + +def get_transaction_count(session: Session, address: str, columns: ColumnType = "*") -> Optional[int]: + """Get total transaction count for an address + + Args: + session: SQLModel session + address: Address in hex string format + columns: Can be "*" for all columns, single column name, or list of column names + + Returns: + Optional[int]: Transaction count or None if not found + + Raises: + ValueError: If address format is invalid + """ + bytes_address = hex_str_to_bytes(address) + statement = select(AddressIndexStats.transaction_count).where(AddressIndexStats.address == bytes_address) + return session.exec(statement).first() diff --git a/hemera/app/api/routes/helper/block.py b/hemera/app/api/routes/helper/block.py new file mode 100644 index 000000000..07fb68c86 --- /dev/null +++ b/hemera/app/api/routes/helper/block.py @@ -0,0 +1,309 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/19 18:35 +# @Author ideal93 +# @File block_utils.py.py +# @Brief + +from datetime import datetime, timedelta +from typing import Any, List, Optional, Union + +from psycopg2._psycopg import Column +from pydantic import BaseModel +from sqlmodel import Session, and_, desc, func, select + +from hemera.app.api.routes.helper import process_columns +from hemera.app.utils import ColumnType +from hemera.common.models.base.blocks import Blocks +from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes + + +# Response 
class BlockDetails(BlockAbbr):
    """Full block payload: everything in ``BlockAbbr`` plus detail fields.

    ``gas_fee_token_price``, ``is_last_block`` and ``seconds_since_last_block``
    cannot be derived from a single Blocks row; ``from_db_model`` leaves them
    as ``None``.
    """

    difficulty: Optional[int]
    extra_data: Optional[str]
    gas_fee_token_price: Optional[str]
    is_last_block: Optional[bool]
    nonce: Optional[str]
    receipts_root: Optional[str]
    seconds_since_last_block: Optional[int]
    sha3_uncles: Optional[str]
    size: Optional[int]
    state_root: Optional[str]
    total_difficulty: Optional[int]
    transactions_root: Optional[str]

    @staticmethod
    def from_db_model(block: Blocks) -> "BlockDetails":
        """Build a BlockDetails from a Blocks ORM row.

        Improvement: reuses ``BlockAbbr.from_db_model`` for the shared fields
        so the two converters cannot drift apart, and factors the repeated
        None-preserving conversions into small local helpers.
        """
        base = BlockAbbr.from_db_model(block)

        def _hex(value: Optional[bytes]) -> Optional[str]:
            # Optional bytes column -> hex string, preserving None.
            return bytes_to_hex_str(value) if value is not None else None

        def _int(value) -> Optional[int]:
            # Optional numeric column -> int, preserving None.
            return int(value) if value is not None else None

        return BlockDetails(
            **base.dict(),
            difficulty=_int(block.difficulty),
            extra_data=_hex(block.extra_data),
            gas_fee_token_price=None,  # not derivable from the row alone
            is_last_block=None,  # not derivable from the row alone
            nonce=_hex(block.nonce),
            receipts_root=_hex(block.receipts_root),
            seconds_since_last_block=None,  # not derivable from the row alone
            sha3_uncles=_hex(block.sha3_uncles),
            size=block.size,
            state_root=_hex(block.state_root),
            total_difficulty=_int(block.total_difficulty),
            transactions_root=_hex(block.transactions_root),
        )
def _get_last_block(session: Session, columns: ColumnType = "*") -> Optional[Blocks]:
    """Get the latest block

    Args:
        session: SQLModel session
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "number": select only number column
            - "number,hash": select number and hash columns
            - ["number", "hash"]: select number and hash columns

    Returns:
        Optional[Blocks]: Latest block or None
        When specific columns are selected, other attributes will raise AttributeError when accessed
    """
    # NOTE(review): unlike _get_block_by_number/_get_block_by_hash this does
    # NOT filter out Blocks.reorg rows -- confirm whether the latest block can
    # legitimately be a reorged row.
    statement = _process_columns(columns)
    statement = statement.order_by(desc(Blocks.number)).limit(1)
    return session.exec(statement).first()


def _get_block_by_number(session: Session, block_number: int, columns: ColumnType = "*") -> Optional[Blocks]:
    """Get block by block number

    Args:
        session: SQLModel session
        block_number: Block number to query
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "number": select only number column
            - "number,hash": select number and hash columns
            - ["number", "hash"]: select number and hash columns

    Returns:
        Optional[Blocks]: Matching block or None
        When specific columns are selected, other attributes will raise AttributeError when accessed
    """
    statement = _process_columns(columns)
    # `== False` (not `is False`) is required: this builds a SQL expression,
    # it is not a Python boolean comparison. Reorged rows are excluded.
    statement = statement.where(and_(Blocks.number == block_number, Blocks.reorg == False))
    return session.exec(statement).first()


def _get_block_by_hash(session: Session, hash: str, columns: ColumnType = "*") -> Optional[Blocks]:
    """Get block by block hash

    Args:
        session: SQLModel session
        hash: Block hash (hex string)
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "number": select only number column
            - "number,hash": select number and hash columns
            - ["number", "hash"]: select number and hash columns

    Returns:
        Optional[Blocks]: Matching block or None
        When specific columns are selected, other attributes will raise AttributeError when accessed

    Raises:
        ValueError: If hash format is invalid
    """
    statement = _process_columns(columns)
    # Hash is stored as raw bytes; decode the hex string before comparing.
    statement = statement.where(and_(Blocks.hash == hex_str_to_bytes(hash), Blocks.reorg == False))
    return session.exec(statement).first()


def get_block_by_number_or_hash(session: Session, number_or_hash: str) -> Optional[BlockDetails]:
    """Get block by block number or hash

    Args:
        session: SQLModel session
        number_or_hash: Block number (decimal string) or block hash

    Returns:
        Optional[BlockDetails]: Matching block or None
    """
    # Purely numeric input is treated as a block number; anything else
    # (e.g. a 0x-prefixed hash) is looked up by hash.
    if number_or_hash.isdigit():
        block = _get_block_by_number(session, int(number_or_hash))
    else:
        block = _get_block_by_hash(session, number_or_hash)
    return BlockDetails.from_db_model(block) if block else None


# Column subset used by list endpoints: exactly the fields BlockAbbr reads.
block_list_columns = [
    "hash",
    "number",
    "timestamp",
    "parent_hash",
    "gas_limit",
    "gas_used",
    "base_fee_per_gas",
    "miner",
    "transactions_count",
    "internal_transactions_count",
]
def _get_blocks_by_condition(
    session: Session,
    filter_condition: Optional[Any] = None,
    columns: ColumnType = "*",
    limit: Optional[int] = None,
    offset: Optional[int] = None,
    order_by: Union[Column, List[Column], None] = None,
) -> List[Blocks]:
    """Get blocks by condition with pagination support

    Args:
        session: SQLModel session
        filter_condition: SQL filter condition, defaults to None (no filter)
            for example: Blocks.number > 100
                         Blocks.number.between(100, 200)
                         Blocks.timestamp > datetime(2021, 1, 1)
                         Blocks.hash == hex_str_to_bytes("0x1234")
                         Blocks.hash.in_([hex_str_to_bytes("0x1234"), hex_str_to_bytes("0x5678")])
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "number": select only number column
            - "number,hash": select number and hash columns
            - ["number", "hash"]: select number and hash columns
        limit: Max number of blocks to return
        offset: Number of blocks to skip
        order_by: Specify sort order, can be a single column or list of columns
            Examples:
            - Blocks.number.desc()
            - [Blocks.timestamp.desc(), Blocks.number.asc()]
            - None (defaults to Blocks.number.desc())

    Returns:
        List[Blocks]: List of matching blocks
        When specific columns are selected, other attributes will raise AttributeError when accessed
    """
    statement = _process_columns(columns)

    if filter_condition is not None:
        statement = statement.where(filter_condition)

    # Default ordering is newest-first; a list of columns is splatted so
    # multi-key sorts work.
    if order_by is None:
        statement = statement.order_by(Blocks.number.desc())
    else:
        if isinstance(order_by, list):
            statement = statement.order_by(*order_by)
        else:
            statement = statement.order_by(order_by)

    # Pagination is applied after ordering so offset/limit see a stable sort.
    if limit is not None:
        statement = statement.limit(limit)
    if offset is not None:
        statement = statement.offset(offset)

    return session.exec(statement).all()
class ContractInfo(BaseModel):
    """API-facing view of a Contracts row with byte columns hex-encoded."""

    contract_creator: str
    transaction_hash: str
    is_verified: bool
    is_proxy: bool
    implementation_contract: Optional[str]
    verified_implementation_contract: Optional[str]
    bytecode: Optional[str]
    creation_code: Optional[str]
    deployed_code: Optional[str]

    @classmethod
    def from_db_model(cls, contract: Contracts) -> "ContractInfo":
        """Convert a Contracts ORM row into ContractInfo.

        Fix: the optional byte columns (implementation_contract,
        verified_implementation_contract, creation_code, deployed_code) can be
        NULL; the original passed them to bytes_to_hex_str unguarded, which
        would fail for rows where they are absent. All optional fields are now
        converted through a None-preserving helper, matching the Optional[str]
        field types above.
        """

        def _hex(value: Optional[bytes]) -> Optional[str]:
            # Optional bytes -> hex string, preserving None.
            return bytes_to_hex_str(value) if value is not None else None

        return cls(
            contract_creator=bytes_to_hex_str(contract.contract_creator),
            transaction_hash=bytes_to_hex_str(contract.transaction_hash),
            is_verified=contract.is_verified,
            is_proxy=contract.is_proxy,
            implementation_contract=_hex(contract.implementation_contract),
            verified_implementation_contract=_hex(contract.verified_implementation_contract),
            # NOTE(review): `bytecode` mirrors creation_code in the original
            # code -- presumably a legacy alias; confirm with API consumers.
            bytecode=_hex(contract.creation_code),
            creation_code=_hex(contract.creation_code),
            deployed_code=_hex(contract.deployed_code),
        )
def _get_contract_by_address(
    session: Session, address: Union[str, bytes], columns: ColumnType = "*"
) -> Optional[Contracts]:
    """Get contract by its address

    Args:
        session: SQLModel session
        address: Contract address (hex string or raw bytes)
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "address": select only address column
            - "address,bytecode": select address and bytecode columns
            - ["address", "bytecode"]: select address and bytecode columns

    Returns:
        Optional[Contracts]: Matching contract or None
        When specific columns are selected, other attributes will raise AttributeError when accessed

    Raises:
        ValueError: If a hex string address is malformed
    """
    # Fix: the original only assigned `bytes_address` inside the
    # isinstance(address, str) branch, so passing raw bytes raised NameError
    # despite the Union[str, bytes] signature. Convert once, covering both.
    bytes_address = hex_str_to_bytes(address) if isinstance(address, str) else address
    statement = _process_columns(columns)
    statement = statement.where(Contracts.address == bytes_address)
    return session.exec(statement).first()


def _get_contracts_by_addresses(
    session: Session, addresses: List[Union[str, bytes]], columns: ColumnType = "*"
) -> List[Contracts]:
    """Get multiple contracts by their addresses

    Args:
        session: SQLModel session
        addresses: List of contract addresses (can be hex strings or bytes)
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "address": select only address column
            - "address,bytecode": select address and bytecode columns
            - ["address", "bytecode"]: select address and bytecode columns

    Returns:
        List[Contracts]: List of matching contracts
        When specific columns are selected, other attributes will raise AttributeError when accessed

    Raises:
        ValueError: If any hex string address format is invalid
    """
    # Convert addresses to bytes if needed; a set also de-duplicates the IN list.
    bytes_addresses = {addr if isinstance(addr, bytes) else hex_str_to_bytes(addr) for addr in addresses}

    statement = _process_columns(columns)
    statement = statement.where(Contracts.address.in_(bytes_addresses))
    return session.exec(statement).all()
def format_dollar_value(value: float) -> str:
    """Format a USD amount: two decimal places above $1, six significant
    digits otherwise."""
    return "{0:.2f}".format(value) if value > 1 else "{0:.6}".format(value)


def format_coin_value(value: Union[int, None], decimal: int = 18) -> str:
    """
    Render a raw integer token amount as a human-readable decimal string.

    Amounts below 1000 base units are shown verbatim; larger amounts are
    divided by 10**decimal and trailing zeros (and a trailing dot) trimmed.

    Parameters:
        value (int | None): raw amount in base units; None renders as "0"
        decimal (int): number of decimals used for scaling (default 18)

    Returns:
        str: The formatted token value as a string.
    """
    if value is None:
        return "0"
    if value < 1000:
        return str(value)
    scaled = value / 10**decimal
    return "{0:.15f}".format(scaled).rstrip("0").rstrip(".")


def format_coin_value_with_unit(value: int, native_token: str) -> str:
    """
    Render a raw integer token amount as a string with its unit.

    Amounts below 1000 are reported in WEI; larger amounts are converted to
    the native token unit (18 decimals) with unnecessary zeros stripped.

    Parameters:
        value (int): raw amount, typically a token amount in WEI
        native_token (str): unit label appended to scaled amounts (e.g. "ETH")

    Returns:
        str: The formatted token value with the appropriate unit.
    """
    if value < 1000:
        return str(value) + " WEI"
    scaled = "{0:.15f}".format(value / 10**18).rstrip("0").rstrip(".")
    return scaled + " " + native_token
    @staticmethod
    def from_db_model(
        transaction: Union[ContractInternalTransactions, AddressInternalTransactions]
    ) -> "InternalTransactionAbbr":
        """Normalize a row from either the trace table or the address index
        table into the shared abbreviated response shape.

        For AddressInternalTransactions rows the from/to pair is derived from
        ``txn_type``: a SENDER row stores this address as the sender and the
        counterparty (``related_address``) as the receiver; any other
        txn_type reverses that mapping.
        """
        # Common fields between both models
        common_fields = {
            "trace_id": transaction.trace_id,
            "trace_type": transaction.trace_type,
            "call_type": transaction.call_type,
            "value": transaction.value,
            "gas": transaction.gas,
            "gas_used": transaction.gas_used,
            "status": transaction.status,
            "error": transaction.error,
            "block_number": transaction.block_number,
            "block_timestamp": transaction.block_timestamp,
            "transaction_index": transaction.transaction_index,
            "block_hash": bytes_to_hex_str(transaction.block_hash) if transaction.block_hash else None,
            "transaction_hash": (
                bytes_to_hex_str(transaction.transaction_hash) if transaction.transaction_hash else None
            ),
        }

        # Convert bytes fields to hex strings

        if isinstance(transaction, ContractInternalTransactions):
            # Trace rows carry explicit from/to columns.
            common_fields["from_address"] = bytes_to_hex_str(transaction.from_address)
            common_fields["to_address"] = bytes_to_hex_str(transaction.to_address)
        else:  # AddressInternalTransactions
            if transaction.txn_type == InternalTransactionType.SENDER.value:
                common_fields["from_address"] = bytes_to_hex_str(transaction.address)
                common_fields["to_address"] = bytes_to_hex_str(transaction.related_address)
            else:
                common_fields["from_address"] = bytes_to_hex_str(transaction.related_address)
                common_fields["to_address"] = bytes_to_hex_str(transaction.address)

        return InternalTransactionAbbr(**common_fields)
def get_internal_transactions_by_address_using_address_index(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    columns: ColumnType = "*",
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[AddressInternalTransactions]:
    """Get internal transactions by address with optional direction filter
    using the address index table.

    Args:
        session: SQLModel session
        address: Address as hex string or raw bytes
        direction: Filter direction - "from", "to", or "both" (default);
            "both" applies no txn_type filter
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "block_number": select only block number
            - "block_number,hash": select block number and hash columns
            - ["block_number", "hash"]: select block number and hash columns
        limit: Max number of internal transactions to return
        offset: Number of internal transactions to skip

    Returns:
        List[AddressInternalTransactions]: Matching rows, newest first
        (block_number, transaction_index and trace_id all descending).
        When specific columns are selected, other attributes will raise
        AttributeError when accessed.

    Raises:
        ValueError: If a hex string address is malformed
    """
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    statement = _process_address_columns(columns).where(AddressInternalTransactions.address == address)

    # SELF_CALL rows are included in both direction filters, matching the
    # original filter sets.
    if direction == "from":
        statement = statement.where(
            AddressInternalTransactions.txn_type.in_(
                [InternalTransactionType.SELF_CALL.value, InternalTransactionType.SENDER.value]
            )
        )
    elif direction == "to":
        statement = statement.where(
            AddressInternalTransactions.txn_type.in_(
                [InternalTransactionType.SELF_CALL.value, InternalTransactionType.RECEIVER.value]
            )
        )
    # "both": no txn_type filter (the original's dead `statement = statement`
    # branch has been removed).

    statement = statement.order_by(
        desc(AddressInternalTransactions.block_number),
        desc(AddressInternalTransactions.transaction_index),
        desc(AddressInternalTransactions.trace_id),
    )
    if limit is not None:
        statement = statement.limit(limit)
    if offset is not None:
        statement = statement.offset(offset)

    return session.exec(statement).all()
def _get_internal_transactions_by_address_native(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    columns: ColumnType = "*",
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[ContractInternalTransactions]:
    """Get internal transactions by address with optional direction filter,
    querying the trace table directly (no address index).

    Args:
        session: SQLModel session
        address: Address as hex string or raw bytes
        direction: Filter direction - "from", "to", or "both" (default)
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "block_number": select only block number
            - "block_number,hash": select block number and hash columns
            - ["block_number", "hash"]: select block number and hash columns
        limit: Max number of internal transactions to return
        offset: Number of internal transactions to skip

    Returns:
        List[ContractInternalTransactions]: List of matching transactions
        When specific columns are selected, other attributes will raise AttributeError when accessed

    Raises:
        ValueError: If a hex string address is malformed
    """
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    statement = _process_columns(columns)

    if direction == "from":
        statement = statement.where(ContractInternalTransactions.from_address == address)
    elif direction == "to":
        statement = statement.where(ContractInternalTransactions.to_address == address)
    else:  # both: match the address on either side of the transfer
        statement = statement.where(
            or_(
                ContractInternalTransactions.from_address == address,
                ContractInternalTransactions.to_address == address,
            )
        )

    # Newest first, with transaction_index and trace_id as tie-breakers.
    statement = statement.order_by(
        desc(ContractInternalTransactions.block_number),
        desc(ContractInternalTransactions.transaction_index),
        desc(ContractInternalTransactions.trace_id),
    )

    if limit is not None:
        statement = statement.limit(limit)
    if offset is not None:
        statement = statement.offset(offset)

    return session.exec(statement).all()
def get_internal_transactions_count_by_address_using_address_index(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
) -> int:
    """Count internal transactions for an address via the address index table.

    Args:
        session: SQLModel session
        address: Address as hex string or raw bytes
        direction: Filter direction - "from", "to", or "both" (default);
            "both" applies no txn_type filter

    Returns:
        int: Count of internal transactions
    """

    # TODO: Use address index statistics for optimized count
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    statement = (
        select(func.count())
        .select_from(AddressInternalTransactions)
        .where(AddressInternalTransactions.address == address)
    )

    # SELF_CALL rows count toward both directions, matching the fetch helpers.
    if direction == "from":
        statement = statement.where(
            AddressInternalTransactions.txn_type.in_(
                [InternalTransactionType.SELF_CALL.value, InternalTransactionType.SENDER.value]
            )
        )
    elif direction == "to":
        statement = statement.where(
            AddressInternalTransactions.txn_type.in_(
                [InternalTransactionType.SELF_CALL.value, InternalTransactionType.RECEIVER.value]
            )
        )
    # "both": no extra filter (the original's dead `statement = statement`
    # branch has been removed).

    # COUNT(*) always yields exactly one row, so .first() returns an int.
    return session.exec(statement).first()
def get_internal_transactions_by_hash(
    session: Session, transaction_hash: Union[str, bytes], columns: ColumnType = "*"
) -> List[InternalTransactionAbbr]:
    """Get internal transactions by transaction hash

    Args:
        session: SQLModel session
        transaction_hash: Transaction hash as hex string or raw bytes
        columns: Can be "*" for all columns, single column name, or list of column names
            Examples:
            - "*": select all columns
            - "block_number": select only block number
            - "block_number,hash": select block number and hash columns
            - ["block_number", "hash"]: select block number and hash columns

    Returns:
        List[InternalTransactionAbbr]: List of standardized transaction responses

    Raises:
        ValueError: If a hex string hash is malformed
    """
    # Fix: the original converted a str hash to bytes and then passed the
    # already-converted value through hex_str_to_bytes AGAIN in the WHERE
    # clause. Convert exactly once and compare raw bytes.
    if isinstance(transaction_hash, str):
        transaction_hash = hex_str_to_bytes(transaction_hash)

    statement = _process_columns(columns).where(
        ContractInternalTransactions.transaction_hash == transaction_hash
    )

    raw_transactions = session.exec(statement).all()
    return [InternalTransactionAbbr.from_db_model(tx) for tx in raw_transactions]


def get_internal_transactions_count_by_block_number(session: Session, block_number: int) -> int:
    """Get count of internal transactions by block number

    Args:
        session: SQLModel session
        block_number: Block number

    Returns:
        int: Count of internal transactions
    """
    statement = (
        select(func.count())
        .select_from(ContractInternalTransactions)
        .where(ContractInternalTransactions.block_number == block_number)
    )
    return session.exec(statement).first() or 0


def get_internal_transactions_by_block_number(
    session: Session, block_number: int, offset: int, limit: int
) -> List[InternalTransactionAbbr]:
    """Get internal transactions by block number

    Args:
        session: SQLModel session
        block_number: Block number
        offset: Number of transactions to skip
        limit: Max number of transactions to return

    Returns:
        List[InternalTransactionAbbr]: List of standardized transaction responses
    """
    transactions = session.exec(
        select(ContractInternalTransactions)
        .where(ContractInternalTransactions.block_number == block_number)
        .order_by(desc(ContractInternalTransactions.block_number), desc(ContractInternalTransactions.trace_id))
        .offset(offset)
        .limit(limit)
    ).all()

    return [InternalTransactionAbbr.from_db_model(tx) for tx in transactions]


def get_internal_transactions_count(session: Session) -> int:
    """Get total count of internal transactions

    Args:
        session: SQLModel session

    Returns:
        int: Count of internal transactions
    """
    statement = select(func.count()).select_from(ContractInternalTransactions)
    return session.exec(statement).first() or 0


def get_internal_transactions(session: Session, offset: int, limit: int) -> List[InternalTransactionAbbr]:
    """Get internal transactions, newest first, with pagination

    Args:
        session: SQLModel session
        offset: Number of transactions to skip
        limit: Max number of transactions to return

    Returns:
        List[InternalTransactionAbbr]: List of standardized transaction responses
    """
    transactions = session.exec(
        select(ContractInternalTransactions)
        .order_by(desc(ContractInternalTransactions.block_number), desc(ContractInternalTransactions.trace_id))
        .offset(offset)
        .limit(limit)
    ).all()

    return [InternalTransactionAbbr.from_db_model(tx) for tx in transactions]
class LogDetails(BaseModel):
    """API-facing view of a Logs row with all byte columns hex-encoded."""

    # Transaction related fields
    transaction_hash: str

    # Log specific fields
    log_index: int
    address: str
    data: str
    topic0: str
    topic1: Optional[str] = None
    topic2: Optional[str] = None
    topic3: Optional[str] = None

    # Block related fields
    block_number: int
    block_hash: str
    block_timestamp: datetime

    @staticmethod
    def from_db_model(log: Logs) -> "LogDetails":
        """Convert a Logs ORM row into LogDetails.

        topics 1-3 are optional and map to None when absent; topic0 is
        converted unguarded. NOTE(review): a log from an anonymous event can
        have no topics at all -- confirm topic0 is always populated upstream.
        """
        return LogDetails(
            transaction_hash=bytes_to_hex_str(log.transaction_hash),
            log_index=log.log_index,
            address=bytes_to_hex_str(log.address),
            data=bytes_to_hex_str(log.data),
            topic0=bytes_to_hex_str(log.topic0),
            topic1=bytes_to_hex_str(log.topic1) if log.topic1 else None,
            topic2=bytes_to_hex_str(log.topic2) if log.topic2 else None,
            topic3=bytes_to_hex_str(log.topic3) if log.topic3 else None,
            block_number=log.block_number,
            block_hash=bytes_to_hex_str(log.block_hash),
            block_timestamp=log.block_timestamp,
        )


class DecodedInputData(BaseModel):
    """Decoded input data for a single log parameter (both hex and decoded
    decimal representations are carried)."""

    indexed: bool
    name: str = ""
    data_type: str
    hex_data: str
    dec_data: str


class ContractFunctionInfo(BaseModel):
    """Decoded event/function metadata attached to a log; all fields default
    to empty so a log with no ABI match still serializes."""

    address_display_name: Optional[str] = None
    function_name: Optional[str] = None
    full_function_name: Optional[str] = None
    function_unsigned: Optional[str] = None
    input_data: List[DecodedInputData] = []
def _process_log_columns(columns: ColumnType):
    """Build a SELECT over Logs restricted to the requested columns.

    ``"*"`` selects the whole model; a comma-separated string or a list of
    names selects the named Logs columns individually.
    """
    if columns == "*":
        return select(Logs)
    names = columns.split(",") if isinstance(columns, str) else columns
    return select(*(getattr(Logs, name.strip()) for name in names))


def _get_logs_by_hash(session: Session, hash: str, columns: ColumnType = "*") -> List[Logs]:
    """Get logs by transaction hash

    Args:
        session: SQLModel session
        hash: Transaction hash (hex string) or bytes
        columns: "*" for all columns, a single column name, a comma-separated
            string, or a list of column names

    Returns:
        List[Logs]: logs whose transaction_hash matches; when specific columns
        are selected, only those columns will be available

    Raises:
        ValueError: If hash format is invalid
    """
    key = hex_str_to_bytes(hash.lower()) if isinstance(hash, str) else hash
    statement = _process_log_columns(columns).where(Logs.transaction_hash == key)
    return session.exec(statement).all()
"data"]: select specific columns + limit: Max number of logs to return + offset: Number of logs to skip + + Returns: + List[Logs]: List of logs matching the contract address + When specific columns are selected, only those columns will be available + + Raises: + ValueError: If address format is invalid + """ + if isinstance(address, str): + address = hex_str_to_bytes(address.lower()) + + statement = _process_log_columns(columns) + statement = statement.where(Logs.address == address).order_by(desc(Logs.block_number), desc(Logs.log_index)) + + statement = statement.limit(limit) + statement = statement.offset(offset) + + return session.exec(statement).all() + + +def get_logs_by_hash(session: Session, hash: str) -> List[LogDetails]: + """Get logs by transaction hash + + Args: + session: SQLModel session + hash: Transaction hash (hex string) + + Returns: + List[LogDetails]: List of logs matching the transaction hash + When specific columns are selected, only those columns will be available + """ + logs = _get_logs_by_hash(session, hash, "*") + return [LogDetails.from_db_model(log) for log in logs] + + +def get_logs_by_address(session: Session, address: str, limit: int = 25, offset: int = 0) -> List[LogDetails]: + """Get logs by contract address + + Args: + session: SQLModel session + address: Contract address (hex string) + limit: Max number of logs to return + offset: Number of logs to skip + Returns: + List[LogDetails]: List of logs matching the contract address + When specific columns are selected, only those columns will be available + """ + logs = _get_logs_by_address(session, address, "*", limit, offset) + return [LogDetails.from_db_model(log) for log in logs] diff --git a/hemera/app/api/routes/helper/stats.py b/hemera/app/api/routes/helper/stats.py new file mode 100644 index 000000000..ca06e90cc --- /dev/null +++ b/hemera/app/api/routes/helper/stats.py @@ -0,0 +1,47 @@ +from typing import Any, List, Tuple, Union + +from sqlalchemy import select +from 
from typing import Any, List, Tuple, Union

# NOTE(review): the original imported `select` from both sqlalchemy and
# sqlmodel (the sqlalchemy one was dead, immediately shadowed) and `Session`
# from sqlalchemy.orm while every sibling helper uses sqlmodel.Session.
# Import both from sqlmodel for consistency.
from sqlmodel import Session, select

from hemera.common.models.stats.daily_transactions_stats import DailyTransactionsStats


def get_daily_transactions_cnt(
    session: Session, columns: Union[str, List[Union[str, Tuple[str, str]]]] = "*", limit: int = 10
) -> List[Any]:
    """
    Get daily transactions count, ordered by block date descending.

    Args:
        session: Database session
        columns: Column names to select. Can be:
            - "*" for all columns
            - A single column name
            - A list of column names
            - A list of tuples (column_name, label)
        limit: Maximum number of records to return

    Returns:
        List of Row objects (wrapping DailyTransactionsStats or the selected
        columns) depending on the columns parameter
    """
    if columns == "*":
        stmt = select(DailyTransactionsStats)
    else:
        if isinstance(columns, str):
            columns = [columns]
        # Handle both simple column names and (column, label) tuples.
        select_columns = []
        for col in columns:
            if isinstance(col, tuple):
                col_name, label = col
                select_columns.append(getattr(DailyTransactionsStats, col_name).label(label))
            else:
                select_columns.append(getattr(DailyTransactionsStats, col))
        stmt = select(*select_columns)

    stmt = stmt.order_by(DailyTransactionsStats.block_date.desc()).limit(limit)

    # execute() (not exec()) preserves the original Row-based return shape.
    return session.execute(stmt).all()
class TokenExtraInfo(BaseModel):
    """Pricing / display metadata carried alongside the core token fields."""

    logo_url: Optional[str] = None
    price: Optional[Decimal] = None
    previous_price: Optional[Decimal] = None
    market_cap: Optional[Decimal] = None
    on_chain_market_cap: Optional[Decimal] = None


class TokenInfo(BaseModel):
    """API-facing token description built from a Tokens DB row."""

    name: Optional[str] = None
    address: Optional[str] = None
    symbol: Optional[str] = None
    type: Optional[str] = None
    decimals: Optional[int] = None
    extra_info: Optional[TokenExtraInfo] = None

    @classmethod
    def from_db_model(cls, token: Tokens) -> "TokenInfo":
        """Map a Tokens row to TokenInfo, hex-encoding the address."""
        extra = TokenExtraInfo(
            price=token.price,
            previous_price=token.previous_price,
            logo_url=token.icon_url,
            market_cap=token.market_cap,
            on_chain_market_cap=token.on_chain_market_cap,
        )
        return cls(
            name=token.name,
            address=bytes_to_hex_str(token.address),
            symbol=token.symbol,
            type=token.token_type,
            decimals=token.decimals,
            extra_info=extra,
        )


def get_token_info(session: Session, address: Union[str, bytes]) -> Optional[TokenInfo]:
    """Get token info by its address

    Args:
        session: SQLModel session
        address: Token address (hex string or bytes)

    Returns:
        Optional[TokenInfo]: Token info or None when unknown
    """
    raw = hex_str_to_bytes(address) if isinstance(address, str) else address
    row = _get_token_info(session, raw)
    return TokenInfo.from_db_model(row) if row else None


def _get_token_info(session: Session, address: Union[str, bytes]) -> Optional[Tokens]:
    """Fetch the Tokens DB row for an address, or None.

    Args:
        session: SQLModel session
        address: Token address (hex string or bytes)
    """
    raw = hex_str_to_bytes(address) if isinstance(address, str) else address
    return session.exec(select(Tokens).where(Tokens.address == raw)).first()


def get_token_map(session: Session, addresses: Sequence[str]) -> dict[str, TokenInfo]:
    """Map hex-string addresses to TokenInfo for every address found.

    Args:
        session: SQLModel session
        addresses: Token addresses (hex strings)

    Returns:
        dict[str, TokenInfo]: hex address -> token info (missing tokens omitted)
    """
    wanted = {hex_str_to_bytes(addr) for addr in addresses}
    rows = session.exec(select(Tokens).where(Tokens.address.in_(wanted))).all()
    return {bytes_to_hex_str(row.address): TokenInfo.from_db_model(row) for row in rows}


def get_token_price(session: Session, symbol: str, date: Optional[datetime] = None) -> Decimal:
    """Get token price

    Args:
        session: SQLModel session
        symbol: Token symbol
        date: When given, return the hourly price at or just before this time;
            otherwise return the latest price

    Returns:
        Decimal: Token price, 0 when no price row exists
    """
    if date:
        statement = (
            select(TokenHourlyPrices)
            .where(TokenHourlyPrices.symbol == symbol, TokenHourlyPrices.timestamp <= date)
            .order_by(TokenHourlyPrices.timestamp.desc())
        )
    else:
        statement = select(TokenPrices).where(TokenPrices.symbol == symbol).order_by(TokenPrices.timestamp.desc())

    row = session.exec(statement).first()
    return row.price if row else Decimal(0.0)


def get_coin_prices(session: Session, dates: List[datetime]) -> List[CoinPrices]:
    """Get coin price rows for the specified block dates.

    Args:
        session: SQLModel session
        dates: Dates to query prices for

    Returns:
        List[CoinPrices]: matching coin price rows
    """
    return session.exec(select(CoinPrices).where(CoinPrices.block_date.in_(dates))).all()


def get_latest_coin_price(session: Session) -> float:
    """Get the most recent coin price.

    Args:
        session: SQLModel session

    Returns:
        float: Latest coin price, 0.0 if no price found
    """
    latest = session.exec(select(CoinPrices).order_by(CoinPrices.block_date.desc())).first()
    return float(latest.price) if latest and latest.price else 0.0
from typing import List, Optional, Union

from pydantic import BaseModel
from sqlmodel import Session, select

from hemera.common.models.current_token_balances import CurrentTokenBalances
from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes


class TokenBalanceAbbr(BaseModel):
    """Abbreviated current-balance row for API responses."""

    address: str
    token_address: str
    token_id: Optional[int]
    balance: str

    @classmethod
    def from_db_model(cls, balance: CurrentTokenBalances) -> "TokenBalanceAbbr":
        """Hex-encode address columns and stringify the (possibly huge) balance."""
        return cls(
            address=bytes_to_hex_str(balance.address),
            token_address=bytes_to_hex_str(balance.token_address),
            token_id=balance.token_id,
            balance=str(balance.balance),
        )


def get_address_token_balances(
    session: Session,
    address: Union[str, bytes],
) -> List[TokenBalanceAbbr]:
    """All current token balances held by ``address``.

    Args:
        session: SQLModel session
        address: Holder address (hex string or bytes)

    Returns:
        List[TokenBalanceAbbr]: one row per (token, token_id) held
    """
    key = hex_str_to_bytes(address) if isinstance(address, str) else address
    rows = session.exec(select(CurrentTokenBalances).where(CurrentTokenBalances.address == key)).all()
    return [TokenBalanceAbbr.from_db_model(row) for row in rows]
from datetime import datetime
from typing import Any, List, Literal, Optional, Tuple, Union

from pydantic import BaseModel
from sqlmodel import Session, desc, func, or_, select

from hemera.common.enumeration.token_type import TokenType
from hemera.common.enumeration.txn_type import AddressNftTransferType, AddressTokenTransferType
from hemera.common.models.address.address_nft_transfers import AddressNftTransfers
from hemera.common.models.address.address_token_transfers import AddressTokenTransfers
from hemera.common.models.token.token_transfers import (
    ERC20TokenTransfers,
    ERC721TokenTransfers,
    ERC1155TokenTransfers,
    NftTransfers,
)
from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes

# Native (per-standard) transfer tables keyed by the public token_type literal.
_NATIVE_TRANSFER_MODELS = {
    "ERC20": ERC20TokenTransfers,
    "ERC721": ERC721TokenTransfers,
    "ERC1155": ERC1155TokenTransfers,
}


class TokenTransferAbbr(BaseModel):
    """Standardized response model for token_transfer"""

    transaction_hash: Optional[str] = None
    log_index: Optional[int] = None
    from_address: Optional[str] = None
    to_address: Optional[str] = None
    token_id: Optional[str] = None
    value: Optional[int] = 0
    token_type: Optional[str] = None
    token_address: Optional[str] = None
    block_number: Optional[int] = None
    # FIX: from_db_model always built a block_hash entry, but the model had no
    # such field, so pydantic silently dropped it. Declare it (hex-encoded).
    block_hash: Optional[str] = None
    block_timestamp: Optional[datetime] = None

    @staticmethod
    def from_db_model(
        transfer: Union[
            ERC20TokenTransfers,
            ERC721TokenTransfers,
            ERC1155TokenTransfers,
            NftTransfers,
            AddressTokenTransfers,
            AddressNftTransfers,
        ]
    ) -> "TokenTransferAbbr":
        """Convert any supported transfer DB row into the response model.

        Address-indexed rows (AddressTokenTransfers / AddressNftTransfers)
        store one side as `address` and the other as `related_address`,
        disambiguated by `transfer_type`; native rows carry from/to directly.
        """
        # Some source tables may not expose block_hash — TODO confirm; be defensive.
        raw_block_hash = getattr(transfer, "block_hash", None)
        common_fields = {
            "transaction_hash": bytes_to_hex_str(transfer.transaction_hash),
            "log_index": transfer.log_index,
            "block_number": transfer.block_number,
            "block_hash": bytes_to_hex_str(raw_block_hash) if raw_block_hash else None,
            "block_timestamp": transfer.block_timestamp,
            "token_address": bytes_to_hex_str(transfer.token_address),
        }

        if isinstance(transfer, (AddressTokenTransfers, AddressNftTransfers)):
            if isinstance(transfer, AddressTokenTransfers):
                token_type = TokenType.ERC20.value
            else:
                # For address-indexed NFT rows a non-null value marks ERC1155.
                token_type = TokenType.ERC1155.value if transfer.value else TokenType.ERC721.value
            is_outgoing = transfer.transfer_type in [
                AddressTokenTransferType.SENDER.value,
                AddressNftTransferType.SENDER.value,
                AddressTokenTransferType.DEPOSITOR.value,
                AddressNftTransferType.BURNER.value,
            ]
            common_fields.update(
                {
                    "token_type": token_type,
                    "from_address": bytes_to_hex_str(transfer.address if is_outgoing else transfer.related_address),
                    "to_address": bytes_to_hex_str(transfer.related_address if is_outgoing else transfer.address),
                    "token_id": (
                        getattr(transfer, "token_id", None) if isinstance(transfer, AddressNftTransfers) else None
                    ),
                    "value": getattr(transfer, "value", None),
                }
            )
        else:
            if isinstance(transfer, ERC20TokenTransfers):
                token_type = TokenType.ERC20.value
            elif isinstance(transfer, ERC721TokenTransfers):
                token_type = TokenType.ERC721.value
            elif isinstance(transfer, ERC1155TokenTransfers):
                token_type = TokenType.ERC1155.value
            else:
                # FIX: unified NftTransfers rows have value set only for
                # ERC1155 (see _get_nft_transfers_by_hash); the original
                # mapping was inverted.
                token_type = TokenType.ERC1155.value if transfer.value else TokenType.ERC721.value
            common_fields.update(
                {
                    "from_address": bytes_to_hex_str(transfer.from_address),
                    "to_address": bytes_to_hex_str(transfer.to_address),
                    "token_id": getattr(transfer, "token_id", None),
                    "value": getattr(transfer, "value", None),
                    "token_type": token_type,
                }
            )
        if common_fields.get("token_id") is not None:
            common_fields["token_id"] = str(common_fields["token_id"])
        return TokenTransferAbbr(**common_fields)


def _get_erc20_token_transfers_by_hash(session: Session, hash: str) -> List[ERC20TokenTransfers]:
    """Get ERC20 token transfers by transaction hash"""
    hash_bytes = hex_str_to_bytes(hash)
    return session.exec(select(ERC20TokenTransfers).where(ERC20TokenTransfers.transaction_hash == hash_bytes)).all()


def _get_erc721_token_transfers_by_hash(session: Session, hash: str) -> List[ERC721TokenTransfers]:
    """Get ERC721 token transfers by transaction hash"""
    hash_bytes = hex_str_to_bytes(hash)
    return session.exec(select(ERC721TokenTransfers).where(ERC721TokenTransfers.transaction_hash == hash_bytes)).all()


def _get_erc1155_token_transfers_by_hash(session: Session, hash: str) -> List[ERC1155TokenTransfers]:
    """Get ERC1155 token transfers by transaction hash"""
    hash_bytes = hex_str_to_bytes(hash)
    return session.exec(select(ERC1155TokenTransfers).where(ERC1155TokenTransfers.transaction_hash == hash_bytes)).all()


def _get_nft_transfers_by_hash(
    session: Session, hash: str, token_type: Literal["erc721", "erc1155", "all"] = "all"
) -> List[NftTransfers]:
    """Get NFT transfers by transaction hash from the unified NFT table.

    In the unified table, value IS NULL marks ERC721 and value IS NOT NULL
    marks ERC1155.
    """
    hash_bytes = hex_str_to_bytes(hash)
    statement = select(NftTransfers).where(NftTransfers.transaction_hash == hash_bytes)

    if token_type == "erc721":
        statement = statement.where(NftTransfers.value == None)  # noqa: E711 — SQL IS NULL
    elif token_type == "erc1155":
        statement = statement.where(NftTransfers.value != None)  # noqa: E711 — SQL IS NOT NULL

    return session.exec(statement).all()


def get_token_transfers_by_hash(
    session: Session,
    hash: str,
    token_type: Literal["ERC20", "ERC721", "ERC1155", "ALL"] = "ALL",
    use_unified_table: bool = False,
) -> List[TokenTransferAbbr]:
    """
    Get token transfers by transaction hash.

    Args:
        session: SQLModel session
        hash: Transaction hash in hex string format
        token_type: Type of token transfer to query
        use_unified_table: Whether to use the unified NFT table for NFT transfers

    Returns:
        List[TokenTransferAbbr]: standardized transfer rows
    """
    transfers: List[Any] = []
    if use_unified_table:
        # FIX: the unified table only holds NFTs; ERC20 always comes from its
        # own table. The previous code routed ERC20/ERC721 (but not ERC1155)
        # through the unified table and passed the upper-case literal to a
        # helper expecting lower-case, so the NFT type filter never applied.
        if token_type in ("ERC20", "ALL"):
            transfers.extend(_get_erc20_token_transfers_by_hash(session, hash))
        if token_type in ("ERC721", "ERC1155", "ALL"):
            unified_filter = "all" if token_type == "ALL" else token_type.lower()
            transfers.extend(_get_nft_transfers_by_hash(session, hash, unified_filter))
    elif token_type == "ERC20":
        transfers.extend(_get_erc20_token_transfers_by_hash(session, hash))
    elif token_type == "ERC721":
        transfers.extend(_get_erc721_token_transfers_by_hash(session, hash))
    elif token_type == "ERC1155":
        transfers.extend(_get_erc1155_token_transfers_by_hash(session, hash))
    else:  # ALL, per-standard tables
        transfers.extend(_get_erc20_token_transfers_by_hash(session, hash))
        transfers.extend(_get_erc721_token_transfers_by_hash(session, hash))
        transfers.extend(_get_erc1155_token_transfers_by_hash(session, hash))

    return [TokenTransferAbbr.from_db_model(transfer) for transfer in transfers if transfer]


def _native_transfers_by_address(
    session: Session,
    model: Any,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[Any]:
    """Shared implementation for the per-standard native-table address queries."""
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    if isinstance(token_address, str):
        token_address = hex_str_to_bytes(token_address)

    statement = select(model)
    if direction == "from":
        statement = statement.where(model.from_address == address)
    elif direction == "to":
        statement = statement.where(model.to_address == address)
    else:  # both
        statement = statement.where(or_(model.from_address == address, model.to_address == address))

    if token_address:
        statement = statement.where(model.token_address == token_address)

    # Deterministic newest-first ordering; the per-type originals disagreed
    # (ERC20 used the full key, ERC721/1155 only block_timestamp).
    statement = statement.order_by(desc(model.block_timestamp), desc(model.block_number), desc(model.log_index))

    if limit:
        statement = statement.limit(limit)
    if offset:
        statement = statement.offset(offset)
    return session.exec(statement).all()


def _get_erc20_transfers_by_address_native(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[ERC20TokenTransfers]:
    """Get ERC20 transfers by address using the native table."""
    return _native_transfers_by_address(session, ERC20TokenTransfers, address, direction, token_address, limit, offset)


def _get_erc721_transfers_by_address_native(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[ERC721TokenTransfers]:
    """Get ERC721 transfers by address using the native table."""
    return _native_transfers_by_address(session, ERC721TokenTransfers, address, direction, token_address, limit, offset)


def _get_erc1155_transfers_by_address_native(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[ERC1155TokenTransfers]:
    """Get ERC1155 transfers by address using the native table."""
    return _native_transfers_by_address(
        session, ERC1155TokenTransfers, address, direction, token_address, limit, offset
    )


def get_nft_transfers_by_address_native(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    token_type: Literal["erc721", "erc1155", "all"] = "all",
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[NftTransfers]:
    """Get NFT transfers by address from the unified NFT table."""
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    if isinstance(token_address, str):
        token_address = hex_str_to_bytes(token_address)

    statement = select(NftTransfers)

    if direction == "from":
        statement = statement.where(NftTransfers.from_address == address)
    elif direction == "to":
        statement = statement.where(NftTransfers.to_address == address)
    else:  # both
        statement = statement.where(or_(NftTransfers.from_address == address, NftTransfers.to_address == address))

    if token_type == "erc721":
        statement = statement.where(NftTransfers.value == None)  # noqa: E711
    elif token_type == "erc1155":
        statement = statement.where(NftTransfers.value != None)  # noqa: E711

    if token_address:
        statement = statement.where(NftTransfers.token_address == token_address)

    statement = statement.order_by(
        desc(NftTransfers.block_timestamp), desc(NftTransfers.block_number), desc(NftTransfers.log_index)
    )

    if limit:
        statement = statement.limit(limit)
    if offset:
        statement = statement.offset(offset)

    return session.exec(statement).all()


def _get_erc20_transfers_by_address_index(
    session: Session,
    address: Union[str, bytes],
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[AddressTokenTransfers]:
    """Get ERC20 transfers by address using the address index table."""
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    if isinstance(token_address, str):
        token_address = hex_str_to_bytes(token_address)

    statement = select(AddressTokenTransfers).where(AddressTokenTransfers.address == address)
    if token_address:
        statement = statement.where(AddressTokenTransfers.token_address == token_address)

    statement = statement.order_by(
        desc(AddressTokenTransfers.block_timestamp),
        desc(AddressTokenTransfers.block_number),
        desc(AddressTokenTransfers.log_index),
    )

    if limit:
        statement = statement.limit(limit)
    if offset:
        statement = statement.offset(offset)
    return session.exec(statement).all()


def _address_nft_transfers_by_index(
    session: Session,
    address: Union[str, bytes],
    erc1155: bool,
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[AddressNftTransfers]:
    """Shared address-index NFT query; erc1155 selects value IS NOT NULL rows."""
    if isinstance(address, str):
        address = hex_str_to_bytes(address)
    if isinstance(token_address, str):
        token_address = hex_str_to_bytes(token_address)

    # FIX: the originals combined the clauses with Python `and`, which
    # evaluates the truthiness of the first SQLAlchemy expression and
    # silently drops the value filter. Pass both clauses to where().
    value_clause = (
        AddressNftTransfers.value != None if erc1155 else AddressNftTransfers.value == None  # noqa: E711
    )
    statement = select(AddressNftTransfers).where(AddressNftTransfers.address == address, value_clause)

    if token_address:
        statement = statement.where(AddressNftTransfers.token_address == token_address)

    statement = statement.order_by(
        desc(AddressNftTransfers.block_timestamp),
        desc(AddressNftTransfers.block_number),
        desc(AddressNftTransfers.log_index),
    )

    if limit:
        statement = statement.limit(limit)
    if offset:
        statement = statement.offset(offset)
    return session.exec(statement).all()


def _get_erc721_transfers_by_address_index(
    session: Session,
    address: Union[str, bytes],
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[AddressNftTransfers]:
    """Get ERC721 transfers by address using the address index table."""
    return _address_nft_transfers_by_index(session, address, False, token_address, limit, offset)


def _get_erc1155_transfers_by_address_index(
    session: Session,
    address: Union[str, bytes],
    token_address: Optional[Union[str, bytes]] = None,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[AddressNftTransfers]:
    """Get ERC1155 transfers by address using the address index table."""
    return _address_nft_transfers_by_index(session, address, True, token_address, limit, offset)


def get_token_transfers_by_address(
    session: Session,
    address: Union[str, bytes],
    token_type: Literal["ERC20", "ERC721", "ERC1155", "ALL"] = "ALL",
    direction: Optional[Literal["from", "to", "both"]] = "both",
    token_address: Optional[Union[str, bytes]] = None,
    use_address_index: bool = False,
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[TokenTransferAbbr]:
    """
    Get token transfers by address with flexible options for querying.

    Args:
        session: SQLModel session
        address: Address to query
        token_type: Type of token transfers to query
        direction: Filter direction (only used for native table queries)
        token_address: Optional token address to filter by
        use_address_index: Whether to use address index tables
        limit: Max number of records to return
        offset: Number of records to skip
    """
    transfers: List[Any] = []
    if use_address_index:
        # Use address index tables (direction is encoded in transfer_type there).
        if token_type == "ERC20":
            transfers.extend(_get_erc20_transfers_by_address_index(session, address, token_address, limit, offset))
        elif token_type == "ERC721":
            transfers.extend(_get_erc721_transfers_by_address_index(session, address, token_address, limit, offset))
        elif token_type == "ERC1155":
            transfers.extend(_get_erc1155_transfers_by_address_index(session, address, token_address, limit, offset))
        else:  # ALL
            # FIX: the original reset the accumulator and then extended it
            # with itself, duplicating every row.
            transfers.extend(_get_erc20_transfers_by_address_index(session, address, token_address, limit, offset))
            transfers.extend(_get_erc721_transfers_by_address_index(session, address, token_address, limit, offset))
            transfers.extend(_get_erc1155_transfers_by_address_index(session, address, token_address, limit, offset))
    else:
        # Use native per-standard tables.
        if token_type == "ERC20":
            transfers.extend(
                _get_erc20_transfers_by_address_native(session, address, direction, token_address, limit, offset)
            )
        elif token_type == "ERC721":
            transfers.extend(
                _get_erc721_transfers_by_address_native(session, address, direction, token_address, limit, offset)
            )
        elif token_type == "ERC1155":
            transfers.extend(
                _get_erc1155_transfers_by_address_native(session, address, direction, token_address, limit, offset)
            )
        else:  # ALL
            transfers.extend(
                _get_erc20_transfers_by_address_native(session, address, direction, token_address, limit, offset)
            )
            transfers.extend(
                _get_erc721_transfers_by_address_native(session, address, direction, token_address, limit, offset)
            )
            transfers.extend(
                _get_erc1155_transfers_by_address_native(session, address, direction, token_address, limit, offset)
            )
    return [TokenTransferAbbr.from_db_model(transfer) for transfer in transfers if transfer]


def _token_transfers_by_condition(
    session: Session,
    model: Any,
    filter_condition: Optional[Any] = None,
    limit: Optional[int] = 25,
    offset: Optional[int] = 0,
) -> List[Any]:
    """Shared paginated query over a native transfer table."""
    statement = select(model)
    # FIX: `if filter_condition:` evaluated the truthiness of a SQLAlchemy
    # clause, which for eq/ne comparisons is False and silently dropped the
    # filter — compare against None explicitly.
    if filter_condition is not None:
        statement = statement.where(filter_condition)
    statement = statement.order_by(desc(model.block_number), desc(model.log_index))
    if limit:
        statement = statement.limit(limit)
    if offset:
        statement = statement.offset(offset)
    return session.exec(statement).all()


def _token_transfers_count(session: Session, model: Any, filter_condition: Optional[Any] = None) -> int:
    """Shared row-count query over a native transfer table."""
    statement = select(func.count()).select_from(model)
    if filter_condition is not None:
        statement = statement.where(filter_condition)
    return session.exec(statement).one()


def _get_erc20_token_transfers_by_condition(
    session: Session,
    filter_condition: Optional[Any] = None,
    limit: Optional[int] = 25,
    offset: Optional[int] = 0,
) -> List[ERC20TokenTransfers]:
    """Get ERC20 token transfers by condition"""
    return _token_transfers_by_condition(session, ERC20TokenTransfers, filter_condition, limit, offset)


def _get_erc721_token_transfers_by_condition(
    session: Session,
    filter_condition: Optional[Any] = None,
    limit: Optional[int] = 25,
    offset: Optional[int] = 0,
) -> List[ERC721TokenTransfers]:
    """Get ERC721 token transfers by condition"""
    return _token_transfers_by_condition(session, ERC721TokenTransfers, filter_condition, limit, offset)


def _get_erc1155_token_transfers_by_condition(
    session: Session,
    filter_condition: Optional[Any] = None,
    limit: Optional[int] = 25,
    offset: Optional[int] = 0,
) -> List[ERC1155TokenTransfers]:
    """Get ERC1155 token transfers by condition"""
    return _token_transfers_by_condition(session, ERC1155TokenTransfers, filter_condition, limit, offset)


def _get_erc20_token_transfers_count(session: Session, filter_condition: Optional[Any] = None) -> int:
    """Get the total count of ERC20 token transfers by condition"""
    return _token_transfers_count(session, ERC20TokenTransfers, filter_condition)


def _get_erc721_token_transfers_count(session: Session, filter_condition: Optional[Any] = None) -> int:
    """Get the total count of ERC721 token transfers by condition"""
    return _token_transfers_count(session, ERC721TokenTransfers, filter_condition)


def _get_erc1155_token_transfers_count(session: Session, filter_condition: Optional[Any] = None) -> int:
    """Get the total count of ERC1155 token transfers by condition"""
    return _token_transfers_count(session, ERC1155TokenTransfers, filter_condition)


def get_token_transfers_by_token_address(
    session: Session,
    token_address: Union[str, bytes],
    token_type: Literal["ERC20", "ERC721", "ERC1155"] = "ERC20",
    limit: int = 25,
    offset: int = 0,
) -> Tuple[int, List[TokenTransferAbbr]]:
    """
    Get token transfers by token address.

    Args:
        session: SQLModel session
        token_address: Token contract address
        token_type: Type of token transfers to query
        limit: Max number of records to return
        offset: Number of records to skip

    Returns:
        Tuple of (total count, page of standardized transfer rows)
    """
    if isinstance(token_address, str):
        token_address = hex_str_to_bytes(token_address)
    # FIX: the ERC721/ERC1155 branches previously built the filter on
    # ERC20TokenTransfers (copy-paste), querying the wrong table's column.
    model = _NATIVE_TRANSFER_MODELS.get(token_type)
    filter_condition = model.token_address == token_address if model is not None else None
    return get_token_transfers(session, filter_condition, token_type, limit, offset)


def get_token_transfers(
    session: Session,
    filter_condition: Optional[Any] = None,
    token_type: Literal["ERC20", "ERC721", "ERC1155"] = "ERC20",
    limit: int = 25,
    offset: int = 0,
) -> Tuple[int, List[TokenTransferAbbr]]:
    """
    Get token transfers with pagination.

    Args:
        session: SQLModel session
        filter_condition: Optional SQLAlchemy filter clause
        token_type: Type of token transfers to query
        limit: Number of records to return
        offset: Number of records to skip

    Returns:
        Tuple of (total count, page of standardized transfer rows)
    """
    model = _NATIVE_TRANSFER_MODELS.get(token_type)
    if model is None:
        return 0, []

    transfers = _token_transfers_by_condition(session, model, filter_condition, limit=limit, offset=offset)
    total_count = _token_transfers_count(session, model, filter_condition)
    return total_count, [TokenTransferAbbr.from_db_model(transfer) for transfer in transfers if transfer]
_get_erc20_token_transfers_count(session, filter_condition) + elif token_type == "ERC721": + transfers = _get_erc721_token_transfers_by_condition(session, filter_condition, limit=limit, offset=offset) + total_count = _get_erc721_token_transfers_count(session, filter_condition) + elif token_type == "ERC1155": + transfers = _get_erc1155_token_transfers_by_condition(session, filter_condition, limit=limit, offset=offset) + total_count = _get_erc1155_token_transfers_count(session, filter_condition) + + return total_count, [TokenTransferAbbr.from_db_model(transfer) for transfer in transfers if transfer] diff --git a/hemera/app/api/routes/helper/transaction.py b/hemera/app/api/routes/helper/transaction.py new file mode 100644 index 000000000..d481be4cf --- /dev/null +++ b/hemera/app/api/routes/helper/transaction.py @@ -0,0 +1,1029 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/22 +# @Author ideal93 +# @File transaction_utils.py +# @Brief + +from datetime import datetime, timedelta +from decimal import Decimal +from typing import Any, Dict, List, Optional, Tuple, Union + +from pydantic import BaseModel, Field +from sqlalchemy import and_, desc, literal, select, true +from sqlmodel import Session, and_, desc, func, or_, select +from typing_extensions import Literal, Tuple + +from hemera.app.api.routes.enricher.address_enricher import Address +from hemera.app.api.routes.helper import ColumnType, process_columns +from hemera.app.api.routes.helper.address import get_txn_cnt_by_address +from hemera.common.enumeration.txn_type import AddressTransactionType +from hemera.common.models.address.address_transactions import AddressTransactions +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.stats.daily_boards_stats import DailyBoardsStats +from hemera.common.models.stats.daily_transactions_stats import DailyTransactionsStats +from hemera.common.models.utils.scheduled_metadata import ScheduledMetadata +from 
class TransactionDetail(BaseModel):
    """Fully enriched view of a single transaction, as served by the API."""

    # Identity
    hash: str
    nonce: int
    transaction_index: int
    transaction_type: int

    # Parties (raw hex plus enriched Address objects)
    from_address: str
    to_address: str
    from_addr: Address
    to_addr: Address

    # Transferred value
    value: str
    display_value: str

    # Gas pricing
    gas: int
    gas_price: str
    max_fee_per_gas: Optional[str]
    max_priority_fee_per_gas: Optional[str]
    gas_price_gwei: str
    gas_fee_token_price: Optional[str]

    # Calldata and decoded function metadata
    input: str
    input_data: List[dict]
    method: Optional[str]
    method_id: Optional[str]
    function_name: Optional[str]
    function_unsigned: Optional[str]
    full_function_name: Optional[str]

    # Execution receipt
    receipt_cumulative_gas_used: int
    receipt_gas_used: int
    receipt_contract_address: Optional[str]
    receipt_root: Optional[str]
    receipt_status: int
    receipt_effective_gas_price: int

    # L1 data-availability costs (rollup chains only; None elsewhere)
    receipt_l1_fee: Optional[str]
    receipt_l1_gas_used: Optional[str]
    receipt_l1_gas_price: Optional[str]
    receipt_l1_fee_scalar: Optional[str]

    # Containing block
    block_timestamp: datetime
    block_number: int
    block_hash: str

    # Fees, including USD conversions
    transaction_fee: str
    transaction_fee_usd: str
    total_transaction_fee: str
    total_transaction_fee_usd: str
    value_usd: str
from_db_model(transaction: Union[Transactions, AddressTransactions]) -> "TransactionAbbr": + common_fields = { + "value": transaction.value, + "receipt_status": transaction.receipt_status, + "block_number": transaction.block_number, + "block_timestamp": transaction.block_timestamp, + "transaction_index": transaction.transaction_index, + "block_hash": bytes_to_hex_str(transaction.block_hash) if transaction.block_hash else None, + } + + if isinstance(transaction, Transactions): + common_fields["hash"] = bytes_to_hex_str(transaction.hash) + common_fields["from_address"] = bytes_to_hex_str(transaction.from_address) + common_fields["to_address"] = bytes_to_hex_str(transaction.to_address) + receipt_gas_used = transaction.receipt_gas_used or Decimal(0) + gas_price = transaction.gas_price or Decimal(0) + common_fields["transaction_fee"] = receipt_gas_used * gas_price + common_fields["method_id"] = transaction.method_id + + else: # AddressTransactions + common_fields["hash"] = bytes_to_hex_str(transaction.transaction_hash) + common_fields["from_address"] = ( + bytes_to_hex_str(transaction.address) + if transaction.txn_type in [AddressTransactionType.SENDER.value, AddressTransactionType.CREATOR.value] + else bytes_to_hex_str(transaction.related_address) + ) + common_fields["to_address"] = ( + bytes_to_hex_str(transaction.related_address) + if transaction.txn_type in [AddressTransactionType.SENDER.value, AddressTransactionType.CREATOR.value] + else bytes_to_hex_str(transaction.address) + ) + common_fields["transaction_fee"] = transaction.transaction_fee + common_fields["method_id"] = transaction.method + + return TransactionAbbr(**common_fields) + + +def _process_columns(columns: ColumnType): + return process_columns(Transactions, columns) + + +def _process_address_columns(columns: ColumnType): + return process_columns(AddressTransactions, columns) + + +class GasStats(BaseModel): + gas_price_avg: float = Field(None, description="Average gas price") + gas_price_max: float = 
Field(None, description="Maximum gas price") + gas_price_min: float = Field(None, description="Minimum gas price") + gas_fee_avg: float = Field(None, description="Average gas fee") + gas_fee_max: float = Field(None, description="Maximum gas fee") + gas_fee_min: float = Field(None, description="Minimum gas fee") + + +def get_gas_stats(session: Session, duration: timedelta) -> GasStats: + """ + Calculate the average, maximum, and minimum values of gas_price and gas_fee (i.e. gas_price * receipt_cumulative_gas_used) + within the specified duration (up to 1 hour) based on the latest block_timestamp in the database. + + Args: + session (Session): SQLModel session object. + duration (timedelta): Time duration for which to calculate the metrics, must not exceed 1 hour. + + Returns: + GasStats: A Pydantic model instance containing the aggregated values for gas_price and gas_fee. + + Raises: + ValueError: If the provided duration exceeds 1 hour. + """ + if duration > timedelta(hours=1): + raise ValueError("duration must not exceed 1 hour") + + # Retrieve the latest block_timestamp from the Transactions table + latest_time_stmt = select(func.max(Transactions.block_timestamp)) + latest_time = session.exec(latest_time_stmt).one() + if latest_time is None: + # Return a GasStats instance with None for all values if there is no data in the database + return GasStats() + + start_time = latest_time - duration + + # Construct the SQL statement to calculate aggregated metrics for gas_price and gas_fee + stmt = select( + func.avg(Transactions.gas_price), + func.max(Transactions.gas_price), + func.min(Transactions.gas_price), + func.avg(Transactions.gas_price * Transactions.receipt_cumulative_gas_used), + func.max(Transactions.gas_price * Transactions.receipt_cumulative_gas_used), + func.min(Transactions.gas_price * Transactions.receipt_cumulative_gas_used), + ).where(Transactions.block_timestamp >= start_time) + + result = session.exec(stmt).one() + + return GasStats( + 
def get_latest_txn_count(session: Session, duration: timedelta) -> int:
    """
    Count transactions within *duration* (up to 1 hour) looking back from the
    latest block_timestamp in the database.

    Args:
        session (Session): SQLModel session.
        duration (timedelta): Look-back window; must not exceed 1 hour.

    Returns:
        int: Number of transactions inside the window (0 if the table is empty).

    Raises:
        ValueError: If *duration* exceeds 1 hour.
    """
    if duration > timedelta(hours=1):
        raise ValueError("duration must not exceed 1 hour")

    # Anchor the window at the newest row, not wall-clock time.
    newest = session.exec(select(func.max(Transactions.block_timestamp))).one()
    if newest is None:
        return 0

    window_start = newest - duration
    return session.exec(select(func.count()).where(Transactions.block_timestamp >= window_start)).one()
+ + Args: + session: SQLModel session + + Returns: + Total estimated transaction count + """ + # Get the latest block date and cumulative count + latest_record = session.exec( + select(DailyTransactionsStats.block_date, DailyTransactionsStats.total_cnt).order_by( + DailyTransactionsStats.block_date.desc() + ) + ).first() + + # Check if the query returned a result + if latest_record is None: + return session.exec(select(func.count()).select_from(Transactions)).first() + + block_date, cumulate_count = latest_record + + current_time = datetime.utcnow() + ten_minutes_ago = current_time - timedelta(minutes=10) + + latest_10_min_txn_cnt = session.exec( + select(func.count()).select_from(Transactions).where(Transactions.block_timestamp >= ten_minutes_ago) + ).first() + + # Convert block_date (datetime.date) to datetime.datetime (assuming midnight as the time) + block_datetime = datetime.combine(block_date, datetime.min.time()) + + avg_txn_per_minute = latest_10_min_txn_cnt / 10 + minutes_since_last_block = int((current_time - block_datetime).total_seconds() / 60) + + estimated_txn = int(avg_txn_per_minute * minutes_since_last_block) + + return estimated_txn + cumulate_count + + +def get_last_transaction(session: Session, columns: ColumnType = "*") -> Optional[Transactions]: + """Get the latest transaction + + Args: + session: SQLModel session + columns: Can be "*" for all columns, single column name, or list of column names + Examples: + - "*": select all columns + - "block_timestamp": select only timestamp + - "block_number,hash": select block number and hash columns + - ["block_number", "hash"]: select block number and hash columns + + Returns: + Optional[Transactions]: Latest transaction or None + When specific columns are selected, other attributes will raise AttributeError when accessed + """ + statement = _process_columns(columns) + statement = statement.order_by(desc(Transactions.block_number), desc(Transactions.transaction_index)) + return 
def get_tps_latest_10min(session: Session, timestamp: datetime) -> float:
    """Average transactions per second over the 10 minutes before *timestamp*.

    Args:
        session: SQLModel session
        timestamp: Reference timestamp (window end, exclusive of later rows)

    Returns:
        float: txn count in the window divided by 600 seconds
    """
    cutoff = timestamp - timedelta(minutes=10)
    count = session.exec(select(func.count()).where(Transactions.block_timestamp >= cutoff)).one()
    return float(count / 600)


def get_address_transaction_count(session: Session, address: str, use_index: bool = True) -> int:
    """Get total transaction count for an address.

    Combines the pre-aggregated historical count (up to the scheduler's
    last_data_timestamp) with a live count of rows newer than that cut-off.

    Args:
        session: SQLModel session
        address: Address in hex string format
        use_index: Whether to use the address index table (default: True)

    Returns:
        int: Total number of transactions for the address

    Raises:
        ValueError: If address format is invalid
    """
    last_timestamp = session.exec(select(func.max(ScheduledMetadata.last_data_timestamp))).one()
    bytes_address = hex_str_to_bytes(address)

    past_txn_count = get_txn_cnt_by_address(session, address) or 0

    # For very active addresses the aggregated figure is accurate enough;
    # skip the (potentially expensive) live query entirely.
    if past_txn_count > MAX_ADDRESS_TXN_COUNT:
        return past_txn_count

    # Use the SQL TRUE construct (true()) rather than a bare Python True
    # inside and_() when there is no cut-off timestamp to filter on.
    if use_index:
        time_filter = (
            AddressTransactions.block_timestamp >= last_timestamp if last_timestamp is not None else true()
        )
        statement = select(func.count()).where(and_(time_filter, AddressTransactions.address == bytes_address))
    else:
        time_filter = Transactions.block_timestamp >= last_timestamp if last_timestamp is not None else true()
        statement = select(func.count()).where(
            and_(
                time_filter,
                or_(Transactions.from_address == bytes_address, Transactions.to_address == bytes_address),
            )
        )

    recently_txn_count = session.exec(statement).one()
    return past_txn_count + recently_txn_count
= ( + select(DailyTransactionsStats.block_date, DailyTransactionsStats.total_cnt) + .order_by(desc(DailyTransactionsStats.block_date)) + .limit(1) + ) + + latest_record = session.exec(statement).first() + + if latest_record is None: + statement = select(func.count()).select_from(Transactions) + return session.exec(statement).one() + + block_date, cumulate_count = latest_record + current_time = datetime.utcnow() + ten_minutes_ago = current_time - timedelta(minutes=10) + + statement = select(func.count()).where(Transactions.block_timestamp >= ten_minutes_ago) + latest_10_min_txn_cnt = session.exec(statement).one() + + avg_txn_per_minute = latest_10_min_txn_cnt / 10 + block_date_as_datetime = datetime.combine(block_date, datetime.min.time()) + minutes_since_last_block = int((current_time - block_date_as_datetime).total_seconds() / 60) + estimated_txn = int(avg_txn_per_minute * minutes_since_last_block) + + return estimated_txn + cumulate_count + + +def get_transactions_by_condition( + session: Session, + filter_condition: Optional[Any] = None, + columns: ColumnType = "*", + limit: Optional[int] = None, + offset: Optional[int] = None, +) -> List[Transactions]: + """Get transactions by condition with pagination support + + Args: + session: SQLModel session + filter_condition: SQL filter condition, defaults to None (no filter) + columns: Can be "*" for all columns, single column name, or list of column names + Examples: + - "*": select all columns + - "block_number": select only block number + - "block_number,hash": select block number and hash columns + - ["block_number", "hash"]: select block number and hash columns + limit: Max number of transactions to return + offset: Number of transactions to skip + + Returns: + List[Transactions]: List of matching transactions + When specific columns are selected, other attributes will raise AttributeError when accessed + """ + statement = _process_columns(columns) + + if filter_condition is not None: + statement = 
def get_transactions_count_by_condition(session: Session, filter_condition: Optional[Any] = None) -> int:
    """Count transactions matching *filter_condition* (None counts all rows).

    Args:
        session: SQLModel session
        filter_condition: SQL filter condition, defaults to None (no filter)

    Returns:
        int: Number of matching transactions
    """
    statement = select(func.count()).select_from(Transactions)
    if filter_condition is not None:
        statement = statement.where(filter_condition)
    return session.exec(statement).one()


def get_transactions_and_total_count_by_condition(
    session: Session,
    filter_condition: Optional[Any] = None,
    columns: ColumnType = "*",
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> (List[TransactionAbbr], int):
    """Fetch one page of matching transactions together with the total count.

    Args:
        session: SQLModel session
        filter_condition: SQL filter condition, defaults to None (no filter)
        columns: Columns to select
        limit: Max number of transactions to return
        offset: Number of transactions to skip

    Returns:
        (List[TransactionAbbr], int): Page of standardized responses and total count
    """
    page = get_transactions_by_condition(
        session=session,
        filter_condition=filter_condition,
        columns=columns,
        limit=limit,
        offset=offset,
    )
    total = get_transactions_count_by_condition(session=session, filter_condition=filter_condition)
    return [TransactionAbbr.from_db_model(tx) for tx in page], total
def _get_transactions_by_address_using_address_index(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    columns: ColumnType = "*",
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[AddressTransactions]:
    """Get transactions for *address* via the address index table.

    Args:
        session: SQLModel session
        address: Address as a hex string or raw bytes
        direction: "from", "to", or "both" (default)
        columns: Columns to select
        limit: Max number of rows to return
        offset: Number of rows to skip

    Returns:
        List[AddressTransactions]: Matching index rows, newest first
    """
    if isinstance(address, str):
        address = hex_str_to_bytes(address)

    statement = _process_address_columns(columns).where(AddressTransactions.address == address)

    # "both" needs no extra predicate; the index row records direction itself.
    if direction == "from":
        statement = statement.where(AddressTransactions.is_sender == True)  # noqa: E712 -- SQL predicate
    elif direction == "to":
        statement = statement.where(AddressTransactions.is_sender == False)  # noqa: E712 -- SQL predicate

    statement = statement.order_by(
        desc(AddressTransactions.block_number), desc(AddressTransactions.transaction_index)
    )
    if limit is not None:
        statement = statement.limit(limit)
    if offset is not None:
        statement = statement.offset(offset)
    return session.exec(statement).all()


def _get_transactions_by_address_native(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
    columns: ColumnType = "*",
    limit: Optional[int] = None,
    offset: Optional[int] = None,
) -> List[Transactions]:
    """Get transactions for *address* by scanning the transactions table.

    Args:
        session: SQLModel session
        address: Address as a hex string or raw bytes
        direction: "from", "to", or "both" (default)
        columns: Columns to select
        limit: Max number of rows to return
        offset: Number of rows to skip

    Returns:
        List[Transactions]: Matching rows, newest first
    """
    if isinstance(address, str):
        address = hex_str_to_bytes(address)

    statement = _process_columns(columns)

    if direction == "from":
        predicate = Transactions.from_address == address
    elif direction == "to":
        predicate = Transactions.to_address == address
    else:  # both sides
        predicate = or_(Transactions.from_address == address, Transactions.to_address == address)
    statement = statement.where(predicate)

    statement = statement.order_by(desc(Transactions.block_number), desc(Transactions.transaction_index))
    if limit is not None:
        statement = statement.limit(limit)
    if offset is not None:
        statement = statement.offset(offset)
    return session.exec(statement).all()
def _get_transactions_count_by_address_using_address_index(
    session: Session,
    address: Union[str, bytes],
    direction: Optional[Literal["from", "to", "both"]] = "both",
) -> int:
    """Count transactions for *address* using the address index table.

    Args:
        session: SQLModel session
        address: Address as a hex string or raw bytes
        direction: "from", "to", or "both" (default)

    Returns:
        int: Count of matching transactions
    """
    if isinstance(address, str):
        address = hex_str_to_bytes(address)

    statement = select(func.count()).select_from(AddressTransactions).where(AddressTransactions.address == address)

    if direction == "from":
        statement = statement.where(AddressTransactions.is_sender == True)  # noqa: E712 -- SQL predicate
    elif direction == "to":
        statement = statement.where(AddressTransactions.is_sender == False)  # noqa: E712 -- SQL predicate

    # COUNT(*) always yields exactly one row: use .one() for consistency with
    # the other count helpers (.first() is typed Optional, which contradicts
    # the declared int return).
    return session.exec(statement).one()
def get_transaction_by_hash(session: Session, hash: str) -> Optional[TransactionDetail]:
    """Get transaction detail by transaction hash.

    NOTE(review): the original implementation fetched the row, checked it for
    None, and then fell off the end of the function -- returning None for
    EVERY input.  Building a full TransactionDetail requires enrichment
    (decoded input, USD prices, enriched addresses) that is not implemented
    in this module; until it is, the None-returning contract is kept but made
    explicit.  TODO: construct and return a TransactionDetail from the row.

    Args:
        session: SQLModel session
        hash: Transaction hash in hex string format

    Returns:
        Optional[TransactionDetail]: Currently always None (see NOTE above).
    """
    transaction = _get_transaction_by_hash(session, hash)
    if transaction is None:
        return None
    # TODO(review): populate TransactionDetail from `transaction`.
    return None


def _get_transaction_by_hash(
    session: Session, hash: Union[str, bytes], columns: ColumnType = "*"
) -> Optional[Transactions]:
    """Get a transaction row by its hash.

    Args:
        session: SQLModel session
        hash: Transaction hash (hex string) or raw bytes
        columns: "*", a single column name, a comma-separated list, or a list

    Returns:
        Optional[Transactions]: Matching transaction or None.  When specific
        columns are selected, unselected attributes raise AttributeError.

    Raises:
        ValueError: If the hash string is malformed
    """
    if isinstance(hash, str):
        hash = hex_str_to_bytes(hash)
    statement = _process_columns(columns).where(Transactions.hash == hash)
    return session.exec(statement).first()


def get_gas_stats_list(
    session: Session, duration: timedelta, interval: timedelta, latest_timestamp: Optional[datetime] = None
) -> List[Tuple[datetime, GasStats]]:
    """
    Return (bucket_start, GasStats) pairs over *duration*, bucketed by
    *interval* (e.g. 12:01, 12:02, ...).  Duration must not exceed 1 hour.
    Note: uses PostgreSQL-specific to_timestamp/extract; adjust for other DBs.
    """
    if duration > timedelta(hours=1):
        raise ValueError("duration must not exceed 1 hour")

    if latest_timestamp is None:
        # Anchor on the newest row in the table.
        latest_time = session.exec(select(func.max(Transactions.block_timestamp))).one()
        if latest_time is None:
            # Bug fix: the original `return List[Tuple[datetime, GasStats]]()`
            # instantiated a typing generic, which raises TypeError.
            return []
    else:
        latest_time = latest_timestamp
    latest_time = latest_time.replace(microsecond=0).replace(second=0)
    start_time = latest_time - duration
    interval_seconds = interval.total_seconds()
    start_epoch = start_time.timestamp()

    # Bucket start = floor((ts - start) / interval) * interval + start.
    bucket_expr = func.to_timestamp(
        func.floor((func.extract("epoch", Transactions.block_timestamp) - start_epoch) / interval_seconds)
        * interval_seconds
        + start_epoch
    ).label("bucket")

    stmt = (
        select(
            bucket_expr,
            func.avg(Transactions.gas_price).label("gas_price_avg"),
            func.max(Transactions.gas_price).label("gas_price_max"),
            func.min(Transactions.gas_price).label("gas_price_min"),
            func.avg(Transactions.gas_price * Transactions.receipt_cumulative_gas_used).label("gas_fee_avg"),
            func.max(Transactions.gas_price * Transactions.receipt_cumulative_gas_used).label("gas_fee_max"),
            func.min(Transactions.gas_price * Transactions.receipt_cumulative_gas_used).label("gas_fee_min"),
        )
        .where(Transactions.block_timestamp >= start_time, Transactions.block_timestamp < latest_time)
        .group_by(bucket_expr)
        .order_by(bucket_expr)
    )

    results = session.exec(stmt).all()

    return [
        (
            row.bucket,
            GasStats(
                gas_price_avg=row.gas_price_avg,
                gas_price_max=row.gas_price_max,
                gas_price_min=row.gas_price_min,
                gas_fee_avg=row.gas_fee_avg,
                gas_fee_max=row.gas_fee_max,
                gas_fee_min=row.gas_fee_min,
            ),
        )
        for row in results
    ]


def get_transaction_count_stats_list(
    session: Session, duration: timedelta, interval: timedelta, latest_timestamp: Optional[datetime] = None
) -> List[Tuple[datetime, int]]:
    """
    Return (bucket_start, transaction_count) pairs over *duration*, bucketed
    by *interval*.  Duration must not exceed 1 hour.
    Note: uses PostgreSQL-specific to_timestamp/extract; adjust for other DBs.
    """
    if duration > timedelta(hours=1):
        raise ValueError("duration must not exceed 1 hour")

    if latest_timestamp is None:
        latest_time = session.exec(select(func.max(Transactions.block_timestamp))).one()
        if latest_time is None:
            return []
    else:
        latest_time = latest_timestamp
    latest_time = latest_time.replace(microsecond=0).replace(second=0)
    start_time = latest_time - duration
    interval_seconds = interval.total_seconds()
    start_epoch = start_time.timestamp()

    # Same bucketing expression as get_gas_stats_list: floor the epoch offset
    # to the containing interval and convert back to a timestamp.
    bucket_expr = func.to_timestamp(
        func.floor((func.extract("epoch", Transactions.block_timestamp) - start_epoch) / interval_seconds)
        * interval_seconds
        + start_epoch
    ).label("bucket")

    stmt = (
        select(bucket_expr, (func.count(Transactions.hash)).label("tx_count"))
        .where(Transactions.block_timestamp >= start_time, Transactions.block_timestamp < latest_time)
        .group_by(bucket_expr)
        .order_by(bucket_expr)
    )

    results = session.exec(stmt).all()
    return [(row.bucket, row.tx_count) for row in results]
class SmartContractMetric(BaseModel):
    """Per-minute activity of one smart contract."""

    contract_address: str = Field(..., description="Smart contract address")
    minute_transaction_count: int = Field(..., description="Transaction count")


def get_top_contracts_transaction_count_list(
    session: Session, duration: timedelta, interval: timedelta, latest_timestamp: Optional[datetime] = None
) -> List[Tuple[datetime, List[SmartContractMetric]]]:
    """For the top-20 contracts (by the latest daily leaderboard), return
    per-bucket transaction counts over *duration*, bucketed by *interval*.
    Duration must not exceed 1 hour."""
    if duration > timedelta(hours=1):
        raise ValueError("duration must not exceed 1 hour")

    # Anchor the window on the newest index row unless a timestamp was given.
    if latest_timestamp is None:
        anchor = session.exec(select(func.max(AddressTransactions.block_timestamp))).one()
        if anchor is None:
            return []
    else:
        anchor = latest_timestamp

    # Work at minute precision.
    anchor = anchor.replace(microsecond=0).replace(second=0)
    window_start = anchor - duration
    bucket_seconds = interval.total_seconds()

    # Top 20 contracts from the most recent daily leaderboard snapshot.
    leaderboard_stmt = (
        select(DailyBoardsStats.key)
        .where(
            DailyBoardsStats.board_id == "top_contract_transactions",
            DailyBoardsStats.block_date
            == (
                select(func.max(DailyBoardsStats.block_date))
                .where(DailyBoardsStats.board_id == "top_contract_transactions")
                .scalar_subquery()
            ),
        )
        .order_by(desc(DailyBoardsStats.count))
        .limit(20)
    )
    leaders = session.exec(leaderboard_stmt).all()
    if not leaders:
        return []

    # Per-minute counts for the leaders inside the window.
    per_minute_stmt = (
        select(
            func.date_trunc("minute", AddressTransactions.block_timestamp).label("bucket"),
            AddressTransactions.address,
            (func.count(AddressTransactions.transaction_hash)).label("tx_count_per_minute"),
        )
        .where(
            AddressTransactions.block_timestamp >= window_start,
            AddressTransactions.block_timestamp < anchor,
            AddressTransactions.address.in_([hex_str_to_bytes(leader) for leader in leaders]),
        )
        .group_by("bucket", AddressTransactions.address)
        .order_by("bucket")
    )
    rows = session.exec(per_minute_stmt).all()

    # Pre-seed every bucket with a zero count for every leader so gaps show up.
    bucket_starts = [
        window_start + timedelta(seconds=bucket_seconds * i)
        for i in range(int(duration.total_seconds() / bucket_seconds))
    ]
    counts_by_bucket: Dict[datetime, Dict[str, float]] = {
        start: {leader: 0.0 for leader in leaders} for start in bucket_starts
    }

    # Fold each per-minute row into the interval bucket that contains it.
    for row in rows:
        minute = row.bucket
        contract = bytes_to_hex_str(row.address)
        minute_count = row.tx_count_per_minute
        for start in bucket_starts:
            if start <= minute < start + interval:
                if contract in counts_by_bucket[start]:
                    counts_by_bucket[start][contract] += minute_count
                break

    # Emit buckets in chronological order with metrics sorted by address.
    series: List[Tuple[datetime, List[SmartContractMetric]]] = []
    for start, contract_counts in sorted(counts_by_bucket.items(), key=lambda item: item[0]):
        metrics = [
            SmartContractMetric(contract_address=contract, minute_transaction_count=count)
            for contract, count in sorted(contract_counts.items())
        ]
        series.append((start, metrics))
    return series
top_active_contracts diff --git a/hemera/app/api/routes/parameters/__init__.py b/hemera/app/api/routes/parameters/__init__.py new file mode 100644 index 000000000..2f2b0e092 --- /dev/null +++ b/hemera/app/api/routes/parameters/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/11 14:46 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/app/api/routes/parameters/validate_address.py b/hemera/app/api/routes/parameters/validate_address.py new file mode 100644 index 000000000..4b881e1db --- /dev/null +++ b/hemera/app/api/routes/parameters/validate_address.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/11 14:46 +# @Author ideal93 +# @File validate_address.py +# @Brief +from fastapi import HTTPException, Path + + +def is_eth_address(address: str) -> bool: + if len(address) != 42 or address[:2] != "0x": + return False + try: + int(address[2:], 16) + return True + except ValueError: + return False + + +async def external_api_validate_address( + address: str = Path( + ..., + description=""" +Ethereum Address or ENS Name + +This parameter accepts both Ethereum addresses and Ethereum Name Service (ENS) names. +An Ethereum address is a 42-character string starting with '0x', followed by 40 hexadecimal characters (0-9, a-f). +Example: + - Ethereum address: `0x32Be343B94f860124dC4fEe278FDCBD38C102D88` + +An ENS name is a human-readable name ending in '.eth', which is mapped to an Ethereum address. +ENS names are case-insensitive. +Example: + - ENS name: `vitalik.eth` + +If the provided address is invalid, a 400 HTTP exception will be raised. 
+ """, + ) +) -> str: + if is_eth_address(address): # TODO: or is_ens_name(address): + return address + raise HTTPException(status_code=400, detail="Invalid Address") + + +async def internal_api_validate_address(address: str = Path(..., description="""Ethereum Address""")) -> str: + standardized_address = address.lower() + if not standardized_address.startswith("0x"): + standardized_address = "0x" + standardized_address + if is_eth_address(standardized_address): + return standardized_address + + raise HTTPException(status_code=400, detail="Invalid Address") diff --git a/hemera/app/api/routes/parameters/validate_block.py b/hemera/app/api/routes/parameters/validate_block.py new file mode 100644 index 000000000..2ada2f0ff --- /dev/null +++ b/hemera/app/api/routes/parameters/validate_block.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/11 15:45 +# @Author ideal93 +# @File validate_block.py +# @Brief + +from typing import Union + +from fastapi import HTTPException, Path + +from hemera.common.utils.web3_utils import valid_hash + + +async def validate_block_identifier( + number_or_hash: str = Path(..., description="Block number or hash") +) -> Union[str, int]: + if number_or_hash.isnumeric(): + return number_or_hash + hash = valid_hash(number_or_hash) + if hash: + return hash + raise HTTPException(status_code=400, detail="Invalid block identifier") diff --git a/hemera/app/api/routes/stats/__init__.py b/hemera/app/api/routes/stats/__init__.py new file mode 100644 index 000000000..416bd8ea3 --- /dev/null +++ b/hemera/app/api/routes/stats/__init__.py @@ -0,0 +1,248 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/26 14:01 +# @Author ideal93 +# @File __init__.py.py +# @Brief +from datetime import date, datetime, timedelta +from typing import List, Optional, Tuple + +from fastapi import APIRouter, Depends, HTTPException, Query +from pydantic import BaseModel, Field +from sqlalchemy import desc, func + +from 
hemera.app.api.deps import ReadSessionDep +from hemera.app.api.routes.helper.block import _get_last_block, get_block_count +from hemera.app.api.routes.helper.transaction import ( + GasStats, + SmartContractMetric, + get_gas_stats, + get_gas_stats_list, + get_latest_txn_count, + get_recent_1_minutes_average_transactions, + get_top_contracts_transaction_count_list, + get_total_txn_count, + get_transaction_count_stats_list, +) +from hemera.common.models.stats.daily_boards_stats import DailyBoardsStats + +router = APIRouter(tags=["STATS"]) + + +class MetricsResponse(BaseModel): + block_timestamp: datetime = Field(..., description="Timestamp of the block") + transaction_count_minute: int = Field(..., description="Number of transactions in the current minute") + transaction_count_total: int = Field(..., description="Total transaction count") + transaction_per_second: float = Field(..., description="Transactions per second") + block_per_second: float = Field(..., description="Blocks generated per second") + gas_stats: GasStats = Field(..., description="Statistics related to gas usage") + top_active_contracts: List[SmartContractMetric] = Field( + ..., description="Top active smart contracts based on the previous day's data" + ) + + +class GasStatsWithBlockTimestamp(BaseModel): + block_timestamp: datetime = Field(..., description="Timestamp of the block") + gas_stats: GasStats = Field(..., description="Statistics related to gas usage") + + +class TransactionCountWithBlockTimestamp(BaseModel): + block_timestamp: datetime = Field(..., description="Timestamp of the block") + transaction_count: int = Field(..., description="Number of transactions in the current minute") + + +class TopActiveContractWithBlockTimestamp(BaseModel): + block_timestamp: datetime = Field(..., description="Timestamp of the block") + top_active_contracts: List[SmartContractMetric] = Field( + ..., description="Top active smart contracts based on the previous day's data" + ) + + +class 
TransactionCountListResponse(BaseModel): + metrics: List[TransactionCountWithBlockTimestamp] = Field( + ..., description="List of transaction count statistics for the latest period" + ) + + +class GasStatsListResponse(BaseModel): + metrics: List[GasStatsWithBlockTimestamp] = Field(..., description="List of metrics for the last N minutes") + + +class TopActiveContractListResponse(BaseModel): + metrics: List[TopActiveContractWithBlockTimestamp] = Field( + ..., description="List of top active contracts for the latest period" + ) + + +def validate_stats_params( + duration: timedelta = Query(default=timedelta(minutes=15), description="Duration for stats (e.g., 30 minutes)"), + interval: timedelta = Query(default=timedelta(minutes=1), description="Time bucket interval (e.g., 2 minutes)"), + latest_timestamp: Optional[datetime] = Query( + default=None, description="Optional latest timestamp to override the database's latest timestamp" + ), +) -> Tuple[timedelta, timedelta, Optional[datetime]]: + """ + Validates that the given duration is evenly divisible by the interval. + Returns a tuple of (duration, interval, latest_timestamp) if valid. 
+ """ + if duration.total_seconds() % interval.total_seconds() != 0: + raise HTTPException(status_code=400, detail="Duration must be evenly divisible by interval") + if latest_timestamp is not None: + latest_timestamp = latest_timestamp.replace(microsecond=0).replace(second=0) + return duration, interval, latest_timestamp + + +@router.get("/v1/developer/stats/latest_gas_stats_list", response_model=GasStatsListResponse) +async def get_latest_gas_stats_list( + session: ReadSessionDep, params: Tuple[timedelta, timedelta, Optional[datetime]] = Depends(validate_stats_params) +): + duration, interval, latest_timestamp = params + result = get_gas_stats_list(session, duration, interval, latest_timestamp) + return GasStatsListResponse( + metrics=[ + GasStatsWithBlockTimestamp(block_timestamp=block_timestamp, gas_stats=gas_stats) + for (block_timestamp, gas_stats) in result + ] + ) + + +@router.get("/v1/developer/stats/latest_transaction_count_list", response_model=TransactionCountListResponse) +async def get_latest_transaction_count_stats( + session: ReadSessionDep, params: Tuple[timedelta, timedelta, Optional[datetime]] = Depends(validate_stats_params) +): + """ + Retrieves transaction count statistics for the latest period specified by duration and interval. + The duration must be evenly divisible by the interval. 
+ """ + duration, interval, latest_timestamp = params + result = get_transaction_count_stats_list(session, duration, interval, latest_timestamp) + return TransactionCountListResponse( + metrics=[ + TransactionCountWithBlockTimestamp(block_timestamp=block_timestamp, transaction_count=tx_count_per_second) + for (block_timestamp, tx_count_per_second) in result + ] + ) + + +# TODO +@router.get("/v1/developer/stats/latest_top_active_contracts_list", response_model=TopActiveContractListResponse) +async def get_latest_top_active_contracts( + session: ReadSessionDep, params: Tuple[timedelta, timedelta, Optional[datetime]] = Depends(validate_stats_params) +): + duration, interval, latest_timestamp = params + result = get_top_contracts_transaction_count_list(session, duration, interval, latest_timestamp) + + return TopActiveContractListResponse( + metrics=[ + TopActiveContractWithBlockTimestamp( + block_timestamp=block_timestamp, top_active_contracts=top_active_contracts + ) + for (block_timestamp, top_active_contracts) in result + ] + ) + + +@router.get("/v1/developer/stats/metrics", response_model=MetricsResponse) +async def get_stats_metrics(session: ReadSessionDep): + + block_timestamp = _get_last_block(session, columns="timestamp") + transaction_count_minute = get_latest_txn_count(session, timedelta(minutes=1)) + transaction_count_total = get_total_txn_count(session) + block_times = get_block_count(session, timedelta(minutes=1)) + gas_stats = get_gas_stats(session, timedelta(minutes=1)) + top_active_contracts = get_recent_1_minutes_average_transactions(session) + + return MetricsResponse( + block_timestamp=block_timestamp.replace(microsecond=0).replace(second=0) if block_timestamp else None, + transaction_count_minute=transaction_count_minute, + transaction_count_total=transaction_count_total, + transaction_per_second=transaction_count_minute / 60.0, + block_per_second=block_times / 60.0, + gas_stats=gas_stats, + top_active_contracts=top_active_contracts, + ) + + 
+@router.get("/v1/developer/stats/all_boards", response_model=List[str]) +async def get_unique_board_ids(session: ReadSessionDep): + query = session.query(DailyBoardsStats.board_id).distinct() + result = query.all() + unique_board_ids = [row[0] for row in result] + return unique_board_ids + + +class ECOBoardResponse(BaseModel): + rank: int + board_id: str + block_date: str + actual_date: str + key: str + count: int + + +class WrappedECOBoardResponse(BaseModel): + total: int + page: int + page_size: int + list: List[ECOBoardResponse] + + +time_ranges = { + "1d": lambda now: now - timedelta(days=1), + "7d": lambda now: now - timedelta(days=7), + "30d": lambda now: now - timedelta(days=30), + "1m": lambda now: now - timedelta(days=30), + "6m": lambda now: now - timedelta(days=180), + "YTD": lambda now: datetime(now.year, 1, 1), + "1y": lambda now: now - timedelta(days=365), + "all": lambda now: datetime(2020, 1, 1), +} + + +@router.get("/v1/developer/stats/get_board_data", response_model=WrappedECOBoardResponse) +# get("time_range", "7d") +async def get_board_data(board_id: str, time_range: str, session: ReadSessionDep, page: int = 1, page_size: int = 10): + today = date.today() + block_date = time_ranges[time_range](today) + query = ( + session.query(DailyBoardsStats) + .filter(DailyBoardsStats.board_id == board_id, DailyBoardsStats.block_date == block_date) + .order_by(desc(DailyBoardsStats.count)) + ) + + total_count = query.count() + result = query.offset((page - 1) * page_size).limit(page_size).all() + if not result: + closest_block_date = ( + session.query(DailyBoardsStats.block_date) + .filter(DailyBoardsStats.board_id == board_id) + .order_by( + func.abs(func.date_part("epoch", DailyBoardsStats.block_date) - func.date_part("epoch", block_date)) + ) + .limit(1) + .scalar() + ) + if not closest_block_date: + return [] + query = ( + session.query(DailyBoardsStats) + .filter(DailyBoardsStats.board_id == board_id, DailyBoardsStats.block_date == closest_block_date) 
+ .order_by(desc(DailyBoardsStats.count)) + ) + + total_count = query.count() + result = query.offset((page - 1) * page_size).limit(page_size).all() + + data = [ + ECOBoardResponse( + rank=(page - 1) * page_size + index + 1, # 计算全局排名 + board_id=row.board_id, + block_date=block_date.strftime("%Y-%m-%d"), + actual_date=row.block_date.strftime("%Y-%m-%d"), + key=row.key, + count=row.count, + ) + for index, row in enumerate(result) + ] + + return WrappedECOBoardResponse(total=total_count, page=page, page_size=page_size, list=data) diff --git a/hemera/migrations/__init__.py b/hemera/app/core/__init__.py similarity index 100% rename from hemera/migrations/__init__.py rename to hemera/app/core/__init__.py diff --git a/hemera/app/core/config.py b/hemera/app/core/config.py new file mode 100644 index 000000000..4991523e8 --- /dev/null +++ b/hemera/app/core/config.py @@ -0,0 +1,198 @@ +import os +from typing import Dict, List, Optional +from urllib.parse import urlparse + +import yaml +from pydantic import BaseModel, Field + +from hemera.common.enumeration.entity_type import calculate_entity_value, generate_entity_types + + +class TokenConfiguration(BaseModel): + dashboard_token: str = Field(default="ETH") + native_token: str = Field(default="ETH") + gas_fee_token: str = Field(default="ETH") + + @classmethod + def from_yaml(cls, data: dict) -> "TokenConfiguration": + return cls( + dashboard_token=data.get("dashboard_token", "ETH"), + native_token=data.get("native_token", "ETH"), + gas_fee_token=data.get("gas_fee_token", "ETH"), + ) + + +class CacheConfig(BaseModel): + cache_type: Optional[str] = None + cache_redis_host: str = Field(default="127.0.0.1") + cache_key_prefix: str = Field(default="socialscan_api_ut") + + @classmethod + def from_yaml(cls, data: dict) -> "CacheConfig": + return cls( + cache_type=data.get("cache_type"), + cache_redis_host=data.get("cache_redis_host", "127.0.0.1"), + cache_key_prefix=data.get("cache_key_prefix", "socialscan_api_ut"), + ) + + def 
get_cache_config(self, redis_db) -> Dict: + if self.cache_type == "RedisCache": + return { + "CACHE_TYPE": "RedisCache", + "CACHE_REDIS_HOST": redis_db.r, + "CACHE_KEY_PREFIX": self.cache_key_prefix, + } + elif self.cache_type == "RedisClusterCache": + return { + "CACHE_TYPE": "RedisCache", + "CACHE_REDIS_HOST": redis_db.r, + "CACHE_KEY_PREFIX": self.cache_key_prefix, + } + return { + "CACHE_TYPE": "SimpleCache", + "DEBUG": True, + } + + +class DatabaseConfig(BaseModel): + host: Optional[str] = None + port: Optional[int] = None + username: Optional[str] = None + password: Optional[str] = None + database: Optional[str] = None + schema: str = Field(default="public") + url: str = Field(default="") + + @classmethod + def from_yaml(cls, data: dict) -> "DatabaseConfig": + return cls( + host=data.get("host"), + port=data.get("port"), + username=data.get("username"), + password=data.get("password"), + database=data.get("database"), + schema=data.get("schema", "public"), + ) + + def get_sql_alchemy_uri(self) -> str: + if self.url: + return self.url + else: + return f"postgresql+psycopg2://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}" + + @staticmethod + def from_url(url: str) -> "DatabaseConfig": + result = urlparse(url) + return DatabaseConfig( + host=result.hostname, + port=result.port, + username=result.username, + password=result.password, + database=result.path[1:], # Remove leading '/' + url=url, + ) + + +class Settings(BaseModel): + udfs: List[str] = [] + chain: Optional[str] = None + ens_service: Optional[str] = None + contract_service: Optional[str] = None + token_configuration: TokenConfiguration = Field(default_factory=TokenConfiguration) + cache_config: CacheConfig = Field(default_factory=CacheConfig) + sql_alchemy_engine_options: Dict = Field(default_factory=dict) + sql_alchemy_database_engine_options: Dict = Field( + default_factory=lambda: { + "pool_size": 100, + "max_overflow": 100, + } + ) + rpc: str = 
Field(default="https://ethereum.publicnode.com") + debug_rpc: str = Field(default="https://ethereum.publicnode.com") + + # constant + MAX_INTERNAL_TRANSACTION: int = 10000 + MAX_TRANSACTION_WITH_CONDITION: int = 10000 + MAX_TRANSACTION: int = 10000 + PAGE_SIZE: int = 10 + DATABASE_URI: str = Field(default="postgresql://postgres:admin@localhost:5432/postgres") + + class Config: + env_file = ".env" + + @classmethod + def from_yaml(cls, yaml_path: str) -> "Settings": + """Create settings from YAML file""" + with open(yaml_path, "r") as f: + config_data = yaml.safe_load(f) + + settings = cls() + + # Load basic fields + settings.udfs = config_data.get("udfs", []) + settings.chain = config_data.get("chain") + settings.ens_service = config_data.get("ens_service") + settings.contract_service = config_data.get("contract_service") + settings.rpc = config_data.get("rpc", "https://ethereum.publicnode.com") + settings.debug_rpc = config_data.get("debug_rpc", "https://ethereum.publicnode.com") + + # Load nested configurations + if "token_configuration" in config_data: + settings.token_configuration = TokenConfiguration.from_yaml(config_data["token_configuration"]) + if "cache_config" in config_data: + settings.cache_config = CacheConfig.from_yaml(config_data["cache_config"]) + if "sql_alchemy_database_engine_options" in config_data: + settings.sql_alchemy_database_engine_options = config_data["sql_alchemy_database_engine_options"] + + return settings + + def update_from_env(self): + """Update settings from environment variables""" + self.udfs = generate_entity_types(calculate_entity_value(os.getenv("ENTITY_TYPES", ""))) + self.chain = os.getenv("CHAIN", self.chain) + self.ens_service = os.getenv("ENS_SERVICE", self.ens_service) + self.contract_service = os.getenv("CONTRACT_SERVICE", self.contract_service) + + if pool_size := os.getenv("SQL_POOL_SIZE"): + self.sql_alchemy_database_engine_options["pool_size"] = int(pool_size) + if max_overflow := os.getenv("SQL_MAX_OVERFLOW"): + 
self.sql_alchemy_database_engine_options["max_overflow"] = int(max_overflow) + + self.rpc = os.getenv("PROVIDER_URI", self.rpc) + self.debug_rpc = os.getenv("DEBUG_PROVIDER_URI", self.debug_rpc) + + # Update cache configuration + if cache_type := os.getenv("CACHE_TYPE"): + self.cache_config.cache_type = cache_type + self.cache_config.cache_redis_host = os.getenv("REDIS_HOST", self.cache_config.cache_redis_host) + + # Update token configuration + if dashboard_token := os.getenv("DASHBOARD_TOKEN"): + self.token_configuration.dashboard_token = dashboard_token + if native_token := os.getenv("NATIVE_TOKEN"): + self.token_configuration.native_token = native_token + if gas_fee_token := os.getenv("GAS_FEE_TOKEN"): + self.token_configuration.gas_fee_token = gas_fee_token + if DATABASE_URI := os.getenv("DATABASE_URI"): + self.DATABASE_URI = DATABASE_URI + + +def get_settings() -> Settings: + """Factory function to create Settings instance with priority: + 1. Environment variables (highest priority) + 2. YAML file (if provided) + 3. 
Default values (lowest priority) + """ + yaml_path = os.getenv("CONFIG_FILE") + + if yaml_path: + settings = Settings.from_yaml(yaml_path) + else: + settings = Settings() + + # Environment variables override YAML settings + settings.update_from_env() + return settings + + +settings = get_settings() diff --git a/hemera/app/core/db.py b/hemera/app/core/db.py new file mode 100644 index 000000000..00ee67afd --- /dev/null +++ b/hemera/app/core/db.py @@ -0,0 +1,30 @@ +import os + +from sqlmodel import create_engine + + +class Database: + _read_engine = None + _write_engine = None + _common_engine = None + + @classmethod + def get_read_engine(cls): + if cls._read_engine is None: + cls._read_engine = create_engine( + os.getenv("READ_POSTGRES_URL") or os.getenv("POSTGRES_URL"), + pool_pre_ping=True, # TODO: Test Bugfix + ) + return cls._read_engine + + @classmethod + def get_write_engine(cls): + if cls._write_engine is None: + cls._write_engine = create_engine(os.getenv("WRITE_POSTGRES_URL") or os.getenv("POSTGRES_URL")) + return cls._write_engine + + @classmethod + def get_common_engine(cls): + if cls._common_engine is None: + cls._common_engine = create_engine(os.getenv("COMMON_POSTGRES_URL") or os.getenv("POSTGRES_URL")) + return cls._common_engine diff --git a/hemera_udf/stats/models/__init__.py b/hemera/app/core/security.py similarity index 100% rename from hemera_udf/stats/models/__init__.py rename to hemera/app/core/security.py diff --git a/hemera/app/core/service.py b/hemera/app/core/service.py new file mode 100644 index 000000000..776516363 --- /dev/null +++ b/hemera/app/core/service.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/25 14:39 +# @Author ideal93 +# @File service.py +# @Brief +from hemera.app.core.config import settings +from hemera.app.service.extra_contract_service import ExtraContractService +from hemera.app.service.extra_ens_service import ExtraEnsService + +extra_ens_service = 
ExtraEnsService(settings.ens_service) if settings.ens_service else None +extra_contract_service = ExtraContractService(settings.contract_service) if settings.contract_service else None diff --git a/hemera/app/main.py b/hemera/app/main.py new file mode 100644 index 000000000..35f167391 --- /dev/null +++ b/hemera/app/main.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/19 16:24 +# @Author will +# @File main.py +# @Brief +from fastapi import FastAPI, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse +from pydantic import ValidationError + +from hemera.app.api.routes.developer.es_adapter.router import router as developer_router +from hemera.app.api.routes.explorer.base import router as base_router +from hemera.app.api.routes.explorer.block import router as block_router +from hemera.app.api.routes.explorer.export import router as export_router +from hemera.app.api.routes.explorer.transaction import router as transaction_router +from hemera.app.api.routes.stats import router as stats_router + +app = FastAPI( + title="Hemera Explorer API", + description="This is my API documentation", + version="1.0.0", + docs_url="/docs", + redoc_url="/redoc", + openapi_url="/openapi.json", +) +app.add_middleware( + middleware_class=CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(block_router) +app.include_router(base_router) +app.include_router(transaction_router) +app.include_router(developer_router) +app.include_router(export_router) +app.include_router(stats_router) + + +def serialize_errors(errors): + if isinstance(errors, list): + return [serialize_errors(item) for item in errors] + elif isinstance(errors, dict): + new_dict = {} + for key, value in errors.items(): + new_dict[key] = serialize_errors(value) + return new_dict + elif isinstance(errors, ValueError): + return str(errors) + else: + return errors 
+ + +@app.exception_handler(ValidationError) +async def validation_exception_handler(request: Request, exc: ValidationError): + errors_serializable = serialize_errors(exc.errors()) + return JSONResponse( + status_code=400, + content={"status": "0", "message": "Invalid input parameters", "result": {"errors": errors_serializable}}, + ) diff --git a/hemera/app/models.py b/hemera/app/models.py new file mode 100644 index 000000000..c3cec4a38 --- /dev/null +++ b/hemera/app/models.py @@ -0,0 +1,136 @@ +import uuid +from datetime import datetime +from enum import Enum +from typing import List, Optional, Union + +from pydantic import BaseModel, EmailStr +from sqlmodel import Field, Relationship, SQLModel + +""" +# Shared properties +class UserBase(SQLModel): + email: EmailStr = Field(unique=True, index=True, max_length=255) + is_active: bool = True + is_superuser: bool = False + full_name: str | None = Field(default=None, max_length=255) + + +# Properties to receive via API on creation +class UserCreate(UserBase): + password: str = Field(min_length=8, max_length=40) + + +class UserRegister(SQLModel): + email: EmailStr = Field(max_length=255) + password: str = Field(min_length=8, max_length=40) + full_name: str | None = Field(default=None, max_length=255) + + +# Properties to receive via API on update, all are optional +class UserUpdate(UserBase): + email: EmailStr | None = Field(default=None, max_length=255) # type: ignore + password: str | None = Field(default=None, min_length=8, max_length=40) + + +class UserUpdateMe(SQLModel): + full_name: str | None = Field(default=None, max_length=255) + email: EmailStr | None = Field(default=None, max_length=255) + + +class UpdatePassword(SQLModel): + current_password: str = Field(min_length=8, max_length=40) + new_password: str = Field(min_length=8, max_length=40) + + +class User(UserBase, table=True): + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + hashed_password: str + items: list["Item"] = 
Relationship(back_populates="owner", cascade_delete=True) + +class TokenPayload(SQLModel): + sub: str | None = None +""" + + +class PoolStatus(BaseModel): + checkedin: int + checkedout: int + overflow: int + size: int + + +class HealthCheckResponse(BaseModel): + latest_block_number: int + latest_block_timestamp: datetime + engine_pool_status: str + read_pool_status: str + write_pool_status: str + common_pool_status: str + status: str = "OK" + + class Config: + json_encoders = {datetime: lambda v: v.isoformat()} + + +class ExplorerStats(BaseModel): + total_transactions: int = Field(description="Total number of transactions") + transaction_tps: float = Field(description="Transactions per second in last 10 minutes") + latest_batch: int = Field(description="Latest batch number") + latest_block: int = Field(description="Latest block number") + avg_block_time: float = Field(description="Average block time") + eth_price: str = Field(description="ETH price in USD") + eth_price_btc: str = Field(description="ETH price in BTC") + eth_price_diff: str = Field(description="ETH price difference percentage") + native_token_price: str = Field(description="Native token price in USD") + native_token_price_eth: str = Field(description="Native token price in ETH") + native_token_price_diff: str = Field(description="Native token price difference percentage") + dashboard_token_price_eth: str = Field(description="Dashboard token price in ETH") + dashboard_token_price: str = Field(description="Dashboard token price in USD") + dashboard_token_price_diff: str = Field(description="Dashboard token price difference percentage") + gas_fee: str = Field(description="Current gas fee in Gwei") + + +class TransactionDay(BaseModel): + value: str # ISO format date + count: int + + +class TransactionsDayResponse(BaseModel): + title: str + data: List[TransactionDay] + + +class SearchResultBase(BaseModel): + type: str + + +class BlockSearchResult(SearchResultBase): + type: str = "block" + block_hash: str 
+ block_number: int + + +class AddressSearchResult(SearchResultBase): + type: str = "address" + wallet_address: str + + +class TransactionSearchResult(SearchResultBase): + type: str = "transaction" + transaction_hash: str + + +class TokenSearchResult(SearchResultBase): + type: str = "token" + token_name: str + token_symbol: str + token_address: str + token_logo_url: Optional[str] + + +SearchResult = Union[BlockSearchResult, AddressSearchResult, TransactionSearchResult, TokenSearchResult] + + +class SortOrder(str, Enum): + ASC = "asc" + DESC = "desc" diff --git a/hemera/app/service/__init__.py b/hemera/app/service/__init__.py new file mode 100644 index 000000000..b29db7d30 --- /dev/null +++ b/hemera/app/service/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/25 14:11 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/app/service/extra_contract_service.py b/hemera/app/service/extra_contract_service.py new file mode 100644 index 000000000..2829dfdce --- /dev/null +++ b/hemera/app/service/extra_contract_service.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/25 14:39 +# @Author ideal93 +# @File extra_contract_service.py +# @Brief + +from typing import Any, Dict, List, Optional, Tuple + +import requests +from sqlmodel import Session, select + +from hemera.app.utils.web3_utils import get_code, get_storage_at, w3 +from hemera.common.models.trace.contracts import Contracts +from hemera.common.utils.exception_control import APIError +from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes + + +class ExtraContractService: + """Service class for handling contract verification and related operations""" + + def __init__(self, contract_service): + self.verify_host = contract_service or "" + self.verify_service_validation = bool(self.verify_host and self.verify_host != "") + + # Constants + self.NORMAL_TIMEOUT = 0.5 + self.VERIFY_TIMEOUT = 30 + 
self.chain_id = self._initial_chain_id() + + # URLs + self._setup_urls() + + def _initial_chain_id(self) -> int: + """Initialize chain ID from config or web3""" + return w3.eth.chain_id + + def _setup_urls(self): + """Setup service URLs""" + self.contract_verify_url = f"{self.verify_host}/v1/contract_verify/sync_verify" + self.common_contract_verify_url = f"{self.verify_host}/v1/contract_verify/async_verify" + self.abi_host = f"{self.verify_host}/v1/contract_verify/method" + + def _get_json_response(self, endpoint: str) -> List[Any]: + """Generic method to get JSON response from contract verify service""" + if not self.verify_service_validation: + return [] + + request_url = f"{self.verify_host}{endpoint}" + try: + response = requests.get(request_url, timeout=self.NORMAL_TIMEOUT) + return response.json() if response.status_code == 200 else [] + except Exception: + return [] + + # Contract Verification Methods + def validate_verify_input(self, address: str, compiler_type: str, compiler_version: str): + """Validate contract verification input parameters""" + if not address or not compiler_type or not compiler_version: + raise APIError("Missing base required data", code=400) + + def get_contract(self, session: Session, address: str) -> Contracts: + """Get contract by address""" + if isinstance(address, str): + address = hex_str_to_bytes(address) + contract = session.exec(select(Contracts).where(Contracts.address == address)).first() + if not contract: + raise APIError("The address is not a contract", code=400) + return contract + + def check_verification_status(self, contract: Contracts): + """Check if contract is already verified""" + if contract.is_verified: + raise APIError("This contract is already verified", code=400) + + def get_contract_code(self, contract: Contracts) -> Tuple[Optional[str], Optional[str]]: + """Get creation and deployed code for a contract""" + creation_code = bytes_to_hex_str(contract.creation_code) if contract.creation_code else 
contract.bytecode + deployed_code = ( + bytes_to_hex_str(contract.deployed_code) if contract.deployed_code else get_code(contract.address) + ) + return creation_code, deployed_code + + def send_sync_verification(self, payload: Dict[str, Any], file_list: List[Any]) -> requests.Response: + """Send synchronous verification request""" + if not self.verify_service_validation: + return self._mock_response("No valid verify service is set", 400) + + payload["chain_id"] = self.chain_id + files = [("files", (file.filename, file.read(), "application/octet-stream")) for file in file_list] + + try: + return requests.post(self.contract_verify_url, data=payload, files=files, timeout=self.VERIFY_TIMEOUT) + except Exception as e: + return self._mock_response(str(e), 400) + + def send_async_verification(self, payload: Dict[str, Any]) -> requests.Response: + """Send asynchronous verification request""" + if not self.verify_service_validation: + return self._mock_response("No valid verify service is set", 400) + + payload["chain_id"] = self.chain_id + compiler_type = payload["compiler_type"] + files = [] + + if compiler_type == "solidity-standard-json-input": + payload["compiler_type"] = "Solidity (Standard-Json-Input)" + files = [("files", (payload["address"] + ".json", payload["input_str"], "application/octet-stream"))] + elif compiler_type == "solidity-single-file": + payload["compiler_type"] = "Solidity (Single file)" + + try: + return requests.post( + self.common_contract_verify_url, data=payload, files=files, timeout=self.VERIFY_TIMEOUT + ) + except Exception as e: + return self._mock_response(str(e), 400) + + # Version and Configuration Methods + def get_solidity_versions(self) -> List[str]: + """Get available Solidity versions""" + return self._get_json_response("/v1/contract_verify/solidity_versions") + + def get_vyper_versions(self) -> List[str]: + """Get available Vyper versions""" + return self._get_json_response("/v1/contract_verify/vyper_versions") + + def 
get_evm_versions(self) -> List[str]: + """Get available EVM versions""" + return self._get_json_response("/v1/contract_verify/evm_versions") + + def get_license_types(self) -> List[str]: + """Get available license types""" + return self._get_json_response("/v1/contract_verify/license_types") + + # Contract Information Methods + def get_contract_code_by_address(self, address: str) -> List[Any]: + """Get contract code by address""" + return self._get_json_response(f"/v1/contract_verify/{self.chain_id}/{address}/code") + + def get_similar_addresses(self, deployed_code_hash: str) -> List[str]: + """Get similar contract addresses""" + return self._get_json_response(f"/v1/contract_verify/similar_address/{self.chain_id}/{deployed_code_hash}") + + def get_contract_names(self, address_list: List[str]) -> Dict[str, str]: + """Get contract names for a list of addresses""" + if not self.verify_service_validation: + return {} + + request_json = { + "chain_id": self.chain_id, + "address_list": address_list, + } + + try: + response = requests.post( + f"{self.verify_host}/v1/contract_verify/get_contract_name", + json=request_json, + timeout=self.NORMAL_TIMEOUT, + ) + return response.json() if response.status_code == 200 else {} + except Exception: + return {} + + # ABI Related Methods + def get_abi_by_address(self, address: str) -> List[Any]: + """Get ABI by address""" + return self._get_json_response(f"/v1/contract_verify/contract_abi/{self.chain_id}/{address}") + + def get_verification_abi(self, address: str) -> Dict[str, Any]: + """Get verification ABI by address""" + return self._get_json_response(f"/v1/contract_verify/contract_verification_abi/{self.chain_id}/{address}") + + def get_verification_status(self, guid: str) -> Dict[str, Any]: + """Get verification status by GUID""" + return self._get_json_response(f"/v1/contract_verify/get_verified_status/{self.chain_id}/{guid}") + + def get_verification_history(self, address: str) -> List[Any]: + """Get verification history by 
address""" + return self._get_json_response(f"/v1/contract_verify/get_verification_history/{self.chain_id}/{address}") + + def get_implementation_contract(self, address: str) -> Optional[str]: + """Get implementation contract address for proxy contracts""" + storage_slots = [ + "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc", + "0x7050c9e0f4ca769c69bd3a8ef740bc37934f8e2c036e5a723fd8ee048ed3f8c3", + "0xc5f16f0fcc639fa48a6947836d9850f504798523bf8c9a3a87d5876cf622bcf7", + "0x5f3b5dfeb7b28cdbd7faba78963ee202a494e2a2cc8c9978d5e30d2aebb8c197", + ] + + for slot in storage_slots: + contract_address = get_storage_at(address, slot) + if contract_address and contract_address != "0x0000000000000000000000000000000000000000": + return contract_address + return None + + def get_abis_for_method(self, session, address_signed_prefix_list: List[Tuple[str, str]]) -> Dict[Any, Any]: + """Get ABIs for methods + + Args: + session: SQLModel session + address_signed_prefix_list: List of tuples containing (address, signed_prefix) + + Returns: + Dictionary containing ABI information for the methods + """ + if not self.verify_service_validation: + return {} + enriched_list = [(addr, prefix, 0) for addr, prefix in address_signed_prefix_list] + return self._get_abis_by_address_signed_prefix(session, enriched_list) + + def get_abis_for_logs(self, session, address_signed_prefix_list: List[Tuple[str, str, int]]) -> Dict[Any, Any]: + """Get ABIs for logs + + Args: + session: SQLModel session + address_signed_prefix_list: List of tuples containing (address, signed_prefix, indexed_true_count) + + Returns: + Dictionary containing ABI information for the logs + """ + if not self.verify_service_validation: + return {} + return self._get_abis_by_address_signed_prefix(session, address_signed_prefix_list) + + def _get_abis_by_address_signed_prefix( + self, session, address_signed_prefix_list: List[Tuple[str, str, int]] + ) -> Dict[Tuple[str, str], Dict[str, Any]]: + """Internal method 
to get ABIs by address and signed prefix + + Args: + session: SQLModel session + address_signed_prefix_list: List of tuples containing (address, signed_prefix, indexed_true_count) + address: Contract address (hex string) + signed_prefix: Method/event signature prefix + indexed_true_count: Number of indexed parameters + + Returns: + Dictionary mapping (address, topic0) pairs to their ABI information + """ + result_list = [] + + for address, signed_prefix, indexed_true_count in address_signed_prefix_list: + contract = session.get(Contracts, hex_str_to_bytes(address)) + if not contract: + continue + + deployed_code_hash = contract.deployed_code_hash + + if contract.is_proxy: + if not contract.implementation_contract: + implementation_address = self.get_implementation_contract(address) + contract.implementation_contract = implementation_address + session.commit() + else: + implementation_address = contract.implementation_contract + + implementation_contract = session.query(Contracts).get(implementation_address) + if implementation_contract: + implementation_hash = implementation_contract.deployed_code_hash + result_list.append( + (1, indexed_true_count, address, (deployed_code_hash, implementation_hash), signed_prefix) + ) + else: + result_list.append((0, indexed_true_count, address, deployed_code_hash, signed_prefix)) + else: + result_list.append((0, indexed_true_count, address, deployed_code_hash, signed_prefix)) + + request_json = {"request_type": 1, "request_list": result_list} + + try: + response = requests.post(url=self.abi_host, json=request_json, timeout=self.NORMAL_TIMEOUT) + if response.status_code == 200: + return {(address, topic0): result_map for address, topic0, result_map in response.json()} + return {} + except Exception: + return {} + + @staticmethod + def _mock_response(text: str, status_code: int) -> Any: + """Create mock response for error cases""" + + class MockResponse: + def __init__(self, text, status_code): + self.text = text + self.status_code 
= status_code + + return MockResponse(text, status_code) diff --git a/hemera/api/app/ens/ens.py b/hemera/app/service/extra_ens_service.py similarity index 89% rename from hemera/api/app/ens/ens.py rename to hemera/app/service/extra_ens_service.py index 37fb53798..8ce2d32ca 100644 --- a/hemera/api/app/ens/ens.py +++ b/hemera/app/service/extra_ens_service.py @@ -1,12 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/25 14:40 +# @Author ideal93 +# @File extra_ens_service.py.py +# @Brief import requests -class ENSClient: +class ExtraEnsService: _instance = None def __new__(cls, *args, **kwargs): if cls._instance is None: - cls._instance = super(ENSClient, cls).__new__(cls) + cls._instance = super(ExtraEnsService, cls).__new__(cls) cls._instance._initialized = False return cls._instance diff --git a/hemera/app/utils/__init__.py b/hemera/app/utils/__init__.py new file mode 100644 index 000000000..69e047c4d --- /dev/null +++ b/hemera/app/utils/__init__.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/22 13:55 +# @Author ideal93 +# @File __init__.py.py +# @Brief +from typing import List, Literal, Union + +ColumnType = Union[Literal["*"], str, List[str]] diff --git a/hemera/api/app/utils/web3_utils.py b/hemera/app/utils/web3_utils.py similarity index 84% rename from hemera/api/app/utils/web3_utils.py rename to hemera/app/utils/web3_utils.py index a7c244515..821612bbc 100644 --- a/hemera/api/app/utils/web3_utils.py +++ b/hemera/app/utils/web3_utils.py @@ -1,15 +1,21 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/22 14:19 +# @Author ideal93 +# @File web3_utils.py +# @Brief + from decimal import Decimal from typing import Optional from web3 import Web3 -from hemera.api.app.cache import app_config, cache +from hemera.app.core.config import settings from hemera.common.utils.abi_code_utils import decode_data -w3 = Web3(Web3.HTTPProvider(app_config.rpc)) +w3 = Web3(Web3.HTTPProvider(settings.rpc)) 
-@cache.memoize(600) def get_balance(address) -> Decimal: try: if not w3.is_address(address): @@ -21,7 +27,6 @@ def get_balance(address) -> Decimal: return Decimal(0) -@cache.memoize(600) def get_code(address) -> Optional[str]: try: if not w3.is_address(address): @@ -33,7 +38,6 @@ def get_code(address) -> Optional[str]: return None -@cache.memoize(300) def get_gas_price(): try: return w3.eth.gas_price diff --git a/hemera/cli/api.py b/hemera/cli/api.py index d43bf2261..b1ccf2c9c 100644 --- a/hemera/cli/api.py +++ b/hemera/cli/api.py @@ -1,11 +1,12 @@ import click +import uvicorn +from hemera.app.main import app from hemera.common.logo import print_logo @click.command(context_settings=dict(help_option_names=["-h", "--help"])) def api(): print_logo() - from hemera.api.app.main import app - app.run("0.0.0.0", 8082, threaded=True, debug=True, use_reloader=False) + uvicorn.run(app, host="0.0.0.0", port=8082) diff --git a/hemera/cli/core/stream_process.py b/hemera/cli/core/stream_process.py index 9f84d28cd..ae662369c 100644 --- a/hemera/cli/core/stream_process.py +++ b/hemera/cli/core/stream_process.py @@ -78,7 +78,7 @@ def stream_process( pid_file, ): print_logo() - import_submodules("hemera_udf") + # import_submodules("hemera_udf") configure_logging(log_level, log_file) configure_signals() provider_uri = pick_random_provider_uri(provider_uri) diff --git a/hemera/common/enumeration/entity_type.py b/hemera/common/enumeration/entity_type.py index 86de00c5f..4b7726d7e 100644 --- a/hemera/common/enumeration/entity_type.py +++ b/hemera/common/enumeration/entity_type.py @@ -7,9 +7,11 @@ from hemera.indexer.domains.contract import Contract from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction from hemera.indexer.domains.current_token_balance import CurrentTokenBalance +from hemera.indexer.domains.current_token_id_balance import CurrentTokenIdBalance from hemera.indexer.domains.log import Log from hemera.indexer.domains.token import 
MarkBalanceToken, Token, UpdateToken from hemera.indexer.domains.token_balance import TokenBalance +from hemera.indexer.domains.token_id_balance import TokenIdBalance from hemera.indexer.domains.token_id_infos import ( ERC721TokenIdChange, ERC721TokenIdDetail, @@ -20,6 +22,7 @@ from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer from hemera.indexer.domains.trace import Trace from hemera.indexer.domains.transaction import Transaction +from hemera.indexer.domains.transaction_trace_json import TransactionTraceJson class DynamicEntityTypeRegistry: @@ -95,6 +98,17 @@ def register_output_types(cls, entity_type: int, output_types: Set[Type]) -> Non """Register output types for a static entity type.""" cls._output_types[entity_type] = output_types + @classmethod + def get_output_types(cls, entity_types: int) -> Set[Type]: + """Get all output types for given static entity types, removing duplicates.""" + seen_types = set() + for bit_value, types in cls._output_types.items(): + if entity_types & bit_value: + for type_class in types: + if type_class not in seen_types: + seen_types.add(type_class) + return seen_types + class EntityType(IntFlag): """ @@ -105,8 +119,13 @@ class EntityType(IntFlag): # Core package EXPLORER_BASE = 1 << 0 EXPLORER_TOKEN = 1 << 1 + EXPLORER_TRACE = 1 << 2 + EXPLORER_TOKEN_TRANSFER = 1 << 3 + EXPLORER_TOKEN_BALANCE = 1 << 4 + EXPLORER_TOKEN_NFT = 1 << 5 + # Composite type EXPLORER = EXPLORER_BASE | EXPLORER_TOKEN | EXPLORER_TRACE @@ -134,16 +153,27 @@ def register_all_output_types(): StaticOutputTypes.register_output_types(EntityType.EXPLORER_BASE, {Block, BlockTsMapper, Transaction, Log}) StaticOutputTypes.register_output_types( - EntityType.EXPLORER_TOKEN, + EntityType.EXPLORER_TOKEN_TRANSFER, { - Token, - UpdateToken, ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer, + }, + ) + + StaticOutputTypes.register_output_types( + EntityType.EXPLORER_TOKEN_BALANCE, + { TokenBalance, 
CurrentTokenBalance, - MarkBalanceToken, + TokenIdBalance, + CurrentTokenIdBalance, + }, + ) + + StaticOutputTypes.register_output_types( + EntityType.EXPLORER_TOKEN_NFT, + { UpdateERC1155TokenIdDetail, ERC1155TokenIdDetail, UpdateERC721TokenIdDetail, @@ -152,6 +182,18 @@ def register_all_output_types(): }, ) + StaticOutputTypes.register_output_types( + EntityType.EXPLORER_TOKEN, + StaticOutputTypes.get_output_types( + EntityType.EXPLORER_TOKEN_TRANSFER | EntityType.EXPLORER_TOKEN_BALANCE | EntityType.EXPLORER_TOKEN_NFT + ) + | { + Token, + UpdateToken, + MarkBalanceToken, + }, + ) + StaticOutputTypes.register_output_types( EntityType.EXPLORER_TRACE, { @@ -159,6 +201,7 @@ def register_all_output_types(): Contract, ContractInternalTransaction, UpdateBlockInternalCount, + TransactionTraceJson, # CoinBalance }, ) @@ -188,6 +231,17 @@ def calculate_entity_value(entity_types: str) -> int: return entities +def generate_entity_types(entity_types: int) -> Generator[str, None, None]: + """Generate entity type strings from combined bit value.""" + for entity_type in EntityType.__members__.keys(): + if entity_types & EntityType[entity_type]: + yield entity_type + + for entity_type, bit_value in DynamicEntityTypeRegistry._dynamic_types.items(): + if entity_types & bit_value: + yield entity_type + + def generate_output_types(entity_types: int) -> Generator[Type, None, None]: """Generate output types for both static and dynamic entity types.""" yield from DynamicEntityTypeRegistry.get_output_types(entity_types) diff --git a/hemera/common/enumeration/token_type.py b/hemera/common/enumeration/token_type.py index 2b540fccb..7d6cd7695 100644 --- a/hemera/common/enumeration/token_type.py +++ b/hemera/common/enumeration/token_type.py @@ -6,3 +6,23 @@ class TokenType(Enum): ERC721 = "ERC721" ERC1155 = "ERC1155" ERC404 = "ERC404" + + @classmethod + def from_string(cls, token_str: str) -> "TokenType": + """ + Initializes a TokenType from a string. The comparison is case-insensitive. 
+ + Args: + token_str (str): The token type as a string. + + Returns: + TokenType: The corresponding TokenType enum member. + + Raises: + ValueError: If the token_str does not correspond to any TokenType. + """ + try: + # Convert the input to uppercase to match the enum values + return cls(token_str.upper()) + except ValueError: + raise ValueError(f"Unknown token type: {token_str}") diff --git a/hemera/common/enumeration/txn_type.py b/hemera/common/enumeration/txn_type.py new file mode 100644 index 000000000..959916434 --- /dev/null +++ b/hemera/common/enumeration/txn_type.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/26 15:19 +# @Author ideal93 +# @File txn_type.py +# @Brief + +from enum import Enum + + +class InternalTransactionType(Enum): + SELF_CALL = 0 + SENDER = 1 + RECEIVER = 2 + + +class AddressTransactionType(Enum): + SELF_CALL = 0 + + SENDER = 1 + RECEIVER = 2 + + CREATOR = 3 + BEEN_CREATED = 4 + + +class AddressTokenTransferType(Enum): + SELF_CALL = 0 + + SENDER = 1 + RECEIVER = 2 + + DEPOSITOR = 3 + WITHDRAWER = 4 + + +class AddressNftTransferType(Enum): + SELF_CALL = 0 + + SENDER = 1 + RECEIVER = 2 + + BURNER = 3 + MINTER = 4 diff --git a/hemera/common/models/__init__.py b/hemera/common/models/__init__.py index c9a6354ac..2a63c5e8e 100644 --- a/hemera/common/models/__init__.py +++ b/hemera/common/models/__init__.py @@ -1,41 +1,37 @@ from dataclasses import fields from datetime import datetime, timezone -from typing import Any, Dict, Type +from typing import Any, Dict, Set, Type -from flask_sqlalchemy import SQLAlchemy from psycopg2._json import Json from sqlalchemy import NUMERIC as SQL_NUMERIC +from sqlalchemy import DateTime, LargeBinary, MetaData +from sqlalchemy import Numeric from sqlalchemy import Numeric as SQL_Numeric from sqlalchemy.dialects.postgresql import ARRAY, BYTEA, JSON, JSONB, NUMERIC, TIMESTAMP +from sqlmodel import SQLModel from hemera.common.utils.format_utils import hex_str_to_bytes from 
hemera.common.utils.module_loading import import_string, import_submodules from hemera.indexer.domains import Domain -model_path_exclude = [] +meta = MetaData(schema="public") -# db = RouteSQLAlchemy(session_options={"autoflush": False}) -db = SQLAlchemy(session_options={"autoflush": False}) -from sqlalchemy import BigInteger, Boolean, DateTime, Integer, LargeBinary, Numeric - - -class HemeraMeta(type(db.Model)): - _registry = {} - - def __new__(mcs, name, bases, attrs): - new_cls = super().__new__(mcs, name, bases, attrs) +class HemeraMeta(type(SQLModel)): + _registry: Dict[str, Type["HemeraModel"]] = {} + def __new__(mcs, name, bases, attrs, **kwargs): + new_cls = super().__new__(mcs, name, bases, attrs, **kwargs) if name != "HemeraModel" and issubclass(new_cls, HemeraModel): mcs._registry[name] = new_cls return new_cls @classmethod - def get_all_subclasses(mcs): + def get_all_subclasses(mcs) -> Dict[Type["HemeraModel"], Type["HemeraModel"]]: import_submodules("hemera.common.models") - def get_subclasses(cls): + def get_subclasses(cls) -> Set[Type["HemeraModel"]]: subclasses = set() for subclass in cls.__subclasses__(): subclasses.add(subclass) @@ -46,19 +42,16 @@ def get_subclasses(cls): return {subclass: subclass for subclass in all_subclasses} -class HemeraModel(db.Model, metaclass=HemeraMeta): - __abstract__ = True +class HemeraModel(SQLModel, metaclass=HemeraMeta): - __query_order__ = [] + metadata = meta + model_config = {"arbitrary_types_allowed": True} + __query_order__: list = [] @staticmethod def model_domain_mapping(): pass - @classmethod - def schema(self): - return "public" - @classmethod def get_all_annotation_keys(cls): keys = set() diff --git a/hemera/common/models/address/__init__.py b/hemera/common/models/address/__init__.py new file mode 100644 index 000000000..abe6947b8 --- /dev/null +++ b/hemera/common/models/address/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/11 16:07 +# @Author ideal93 +# 
@File __init__.py.py +# @Brief diff --git a/hemera/common/models/address/address_contract_operation.py b/hemera/common/models/address/address_contract_operation.py new file mode 100644 index 000000000..0405af55a --- /dev/null +++ b/hemera/common/models/address/address_contract_operation.py @@ -0,0 +1,55 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Index, desc +from sqlalchemy.sql import text +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index.domains import AddressContractOperation + + +class AddressContractOperations(HemeraModel, table=True): + __tablename__ = "address_contract_operations" + + # Metadata fields + address: bytes = Field(primary_key=True) + trace_id: str = Field(primary_key=True) + block_number: int = Field(primary_key=True) + transaction_index: int = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + trace_from_address: Optional[bytes] = Field(default=None) + contract_address: Optional[bytes] = Field(default=None) + transaction_hash: Optional[bytes] = Field(default=None) + block_hash: Optional[bytes] = Field(default=None) + error: Optional[str] = Field(default=None) + status: Optional[int] = Field(default=None) + gas: Optional[Decimal] = Field(default=None) + gas_used: Optional[Decimal] = Field(default=None) + trace_type: Optional[str] = Field(default=None) + call_type: Optional[str] = Field(default=None) + transaction_receipt_status: Optional[int] = Field(default=None) + + # Metadata fields + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressContractOperation, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + 
"address_contract_operations_address_block_tn_t_idx", + text("address, block_timestamp DESC, block_number DESC, transaction_index DESC"), + ), + ) diff --git a/hemera/common/models/address/address_internal_transaciton.py b/hemera/common/models/address/address_internal_transaciton.py new file mode 100644 index 000000000..1e9c65751 --- /dev/null +++ b/hemera/common/models/address/address_internal_transaciton.py @@ -0,0 +1,64 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Index, desc +from sqlalchemy.sql import text +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index.domains import AddressInternalTransaction + + +class AddressInternalTransactions(HemeraModel, table=True): + __tablename__ = "address_internal_transactions" + + # Primary key fields + address: bytes = Field(primary_key=True) + trace_id: str = Field(primary_key=True) + block_number: int = Field(primary_key=True) + transaction_index: int = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + trace_type: Optional[str] = Field(default=None) + + # Additional fields + related_address: Optional[bytes] = Field(default=None) + transaction_receipt_status: Optional[int] = Field(default=None) + + # Transaction related fields + transaction_hash: Optional[bytes] = Field(default=None) + block_hash: Optional[bytes] = Field(default=None) + error: Optional[str] = Field(default=None) + status: Optional[int] = Field(default=None) + input_method: Optional[str] = Field(default=None) + + # Numerical fields + value: Optional[Decimal] = Field(default=None, max_digits=100) + gas: Optional[Decimal] = Field(default=None, max_digits=100) + gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + + # Type fields + call_type: Optional[str] = Field(default=None) + txn_type: Optional[int] = Field(default=None) + + # Metadata fields + create_time: 
Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressInternalTransaction, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "address_internal_transactions_address_idx", + text("address, block_timestamp DESC, block_number DESC, transaction_index DESC, trace_id DESC"), + ), + ) diff --git a/hemera/common/models/address/address_nft_1155_holders.py b/hemera/common/models/address/address_nft_1155_holders.py new file mode 100644 index 000000000..c9b0ce8fd --- /dev/null +++ b/hemera/common/models/address/address_nft_1155_holders.py @@ -0,0 +1,40 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Index +from sqlalchemy.sql import text +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index.domains import AddressNft1155Holder + + +class AddressNftTokenHolders(HemeraModel, table=True): + __tablename__ = "address_nft_1155_holders" + + address: bytes = Field(primary_key=True) + token_address: bytes = Field(primary_key=True) + token_id: Decimal = Field(primary_key=True) + balance_of: Optional[Decimal] = Field(default=None) + + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressNft1155Holder, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "address_nft_1155_holders_token_address_balance_of_idx", + text("token_address, token_id, balance_of DESC"), + ), + ) diff --git a/hemera/common/models/address/address_nft_transfers.py 
b/hemera/common/models/address/address_nft_transfers.py new file mode 100644 index 000000000..981d0ee19 --- /dev/null +++ b/hemera/common/models/address/address_nft_transfers.py @@ -0,0 +1,53 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import text +from sqlmodel import Field, Index, SQLModel + +from hemera.common.models import HemeraModel, general_converter +from hemera.common.models.token.token_transfers import BaseTokenTransfer +from hemera_udf.address_index.domains import AddressNftTransfer + + +class AddressNftTransfers(BaseTokenTransfer, HemeraModel, table=True): + """Model for indexing NFT transfers by address""" + + __tablename__ = "address_nft_transfers" + + address: bytes = Field(primary_key=True) + block_number: int = Field(primary_key=True) + log_index: int = Field(primary_key=True) + transaction_hash: bytes = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + token_id: Decimal = Field(primary_key=True) + + block_hash: bytes = Field(default=None) + + token_address: Optional[bytes] = Field(default=None) + related_address: Optional[bytes] = Field(default=None) + transfer_type: Optional[int] = Field(default=None) + value: Optional[Decimal] = Field(default=None) + + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + __table_args__ = ( + Index( + "idx_address_nft_transfers_token_time", + text("address, block_timestamp DESC, block_number DESC, log_index DESC"), + ), + ) + + __query_order__ = ["block_number", "log_index"] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressNftTransfer, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] diff --git a/hemera/common/models/address/address_token_holders.py b/hemera/common/models/address/address_token_holders.py new file mode 100644 index 000000000..e1af33389 --- 
/dev/null +++ b/hemera/common/models/address/address_token_holders.py @@ -0,0 +1,37 @@ +from datetime import datetime +from decimal import Decimal + +from sqlalchemy import Index, desc +from sqlalchemy.sql import text +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index.domains import AddressTokenHolder + + +class AddressTokenHolders(HemeraModel, table=True): + __tablename__ = "address_token_holders" + + address: bytes = Field(primary_key=True) + token_address: bytes = Field(primary_key=True) + balance_of: Decimal = Field() + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressTokenHolder, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "address_token_holders_token_address_balance_of_idx", + text("token_address, balance_of DESC"), + ), + ) diff --git a/hemera/common/models/address/address_token_transfers.py b/hemera/common/models/address/address_token_transfers.py new file mode 100644 index 000000000..a999e4cde --- /dev/null +++ b/hemera/common/models/address/address_token_transfers.py @@ -0,0 +1,52 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Index +from sqlalchemy.sql import text +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index.domains import AddressTokenTransfer + + +class AddressTokenTransfers(HemeraModel, table=True): + """Model for indexing token transfers by address""" + + __tablename__ = "address_token_transfers" + + # Primary keys + address: bytes = Field(primary_key=True) + block_number: int = Field(primary_key=True) + log_index: int = Field(primary_key=True) + transaction_hash: bytes = 
Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + block_hash: bytes = Field(primary_key=True) + + # Transfer data + token_address: Optional[bytes] = Field(default=None) + related_address: Optional[bytes] = Field(default=None) + transfer_type: Optional[int] = Field(default=None) + value: Optional[Decimal] = Field(default=None) + + # Metadata + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressTokenTransfer, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "address_token_transfers_wallet_address_token_address__idx", + text("address, block_number DESC, block_timestamp DESC"), + ), + ) diff --git a/hemera/common/models/address/address_transactions.py b/hemera/common/models/address/address_transactions.py new file mode 100644 index 000000000..49a961a0b --- /dev/null +++ b/hemera/common/models/address/address_transactions.py @@ -0,0 +1,58 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Column, text +from sqlalchemy.dialects.postgresql import SMALLINT +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index import AddressTransaction + + +class AddressTransactions(HemeraModel, table=True): + __tablename__ = "address_transactions" + + # Primary key fields + address: bytes = Field(primary_key=True) + block_number: int = Field(primary_key=True) + transaction_index: int = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + # Transaction related fields + transaction_hash: Optional[bytes] = Field(default=None) + block_hash: Optional[bytes] = Field(default=None) + txn_type: Optional[int] = Field(default=None, sa_column=Column(SMALLINT)) + 
related_address: Optional[bytes] = Field(default=None) + value: Optional[Decimal] = Field(default=None, max_digits=100) + transaction_fee: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_status: Optional[int] = Field(default=None) + method: Optional[str] = Field(default=None) + + # Metadata fields + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + __query_order__ = [block_number, transaction_index] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": AddressTransaction, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "address_transactions_address_block_timestamp_block_number_t_idx", + text("address, block_timestamp DESC, block_number DESC, transaction_index DESC"), + ), + Index( + "address_transactions_address_txn_type_block_timestamp_block_idx", + text("address, txn_type, block_timestamp DESC, block_number DESC, transaction_index DESC"), + ), + ) diff --git a/hemera/common/models/address/stats/__init__.py b/hemera/common/models/address/stats/__init__.py new file mode 100644 index 000000000..abe6947b8 --- /dev/null +++ b/hemera/common/models/address/stats/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/11 16:07 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/common/models/address/stats/address_index_daily_stats.py b/hemera/common/models/address/stats/address_index_daily_stats.py new file mode 100644 index 000000000..5e24da68d --- /dev/null +++ b/hemera/common/models/address/stats/address_index_daily_stats.py @@ -0,0 +1,62 @@ +from datetime import date +from decimal import Decimal +from typing import Optional + +from sqlmodel import Field, SQLModel + +from hemera.common.models import HemeraModel + + +class AddressIndexDailyStats(HemeraModel, table=True): + __tablename__ = "af_index_daily_stats" 
+ + address: bytes = Field(primary_key=True) + block_date: date = Field(primary_key=True) + + transaction_in_count: Optional[int] = Field(default=None) + transaction_out_count: Optional[int] = Field(default=None) + transaction_self_count: Optional[int] = Field(default=None) + + transaction_in_value: Optional[Decimal] = Field(default=None) + transaction_out_value: Optional[Decimal] = Field(default=None) + transaction_self_value: Optional[Decimal] = Field(default=None) + + transaction_in_fee: Optional[Decimal] = Field(default=None) + transaction_out_fee: Optional[Decimal] = Field(default=None) + transaction_self_fee: Optional[Decimal] = Field(default=None) + + internal_transaction_in_count: Optional[int] = Field(default=None) + internal_transaction_out_count: Optional[int] = Field(default=None) + internal_transaction_self_count: Optional[int] = Field(default=None) + + internal_transaction_in_value: Optional[Decimal] = Field(default=None) + internal_transaction_out_value: Optional[Decimal] = Field(default=None) + internal_transaction_self_value: Optional[Decimal] = Field(default=None) + + erc20_transfer_in_count: Optional[int] = Field(default=None) + erc20_transfer_out_count: Optional[int] = Field(default=None) + erc20_transfer_self_count: Optional[int] = Field(default=None) + + nft_transfer_in_count: Optional[int] = Field(default=None) + nft_transfer_out_count: Optional[int] = Field(default=None) + nft_transfer_self_count: Optional[int] = Field(default=None) + + nft_721_transfer_in_count: Optional[int] = Field(default=None) + nft_721_transfer_out_count: Optional[int] = Field(default=None) + nft_721_transfer_self_count: Optional[int] = Field(default=None) + + nft_1155_transfer_in_count: Optional[int] = Field(default=None) + nft_1155_transfer_out_count: Optional[int] = Field(default=None) + nft_1155_transfer_self_count: Optional[int] = Field(default=None) + + contract_creation_count: Optional[int] = Field(default=None) + contract_destruction_count: Optional[int] = 
Field(default=None) + contract_operation_count: Optional[int] = Field(default=None) + + transaction_count: Optional[int] = Field(default=None) + internal_transaction_count: Optional[int] = Field(default=None) + erc20_transfer_count: Optional[int] = Field(default=None) + + nft_transfer_count: Optional[int] = Field(default=None) + nft_721_transfer_count: Optional[int] = Field(default=None) + nft_1155_transfer_count: Optional[int] = Field(default=None) diff --git a/hemera/common/models/address/stats/address_index_stats.py b/hemera/common/models/address/stats/address_index_stats.py new file mode 100644 index 000000000..362c57a2f --- /dev/null +++ b/hemera/common/models/address/stats/address_index_stats.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/18 18:07 +# @Author ideal93 +# @File address_index_stats.py +# @Brief +from decimal import Decimal +from typing import Optional + +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class AddressIndexStats(HemeraModel, table=True): + __tablename__ = "af_index_stats" + + address: bytes = Field(primary_key=True) + + transaction_in_count: Optional[int] = Field(default=None) + transaction_out_count: Optional[int] = Field(default=None) + transaction_self_count: Optional[int] = Field(default=None) + + transaction_in_value: Optional[Decimal] = Field(default=None) + transaction_out_value: Optional[Decimal] = Field(default=None) + transaction_self_value: Optional[Decimal] = Field(default=None) + + transaction_in_fee: Optional[Decimal] = Field(default=None) + transaction_out_fee: Optional[Decimal] = Field(default=None) + transaction_self_fee: Optional[Decimal] = Field(default=None) + + internal_transaction_in_count: Optional[int] = Field(default=None) + internal_transaction_out_count: Optional[int] = Field(default=None) + internal_transaction_self_count: Optional[int] = Field(default=None) + + internal_transaction_in_value: Optional[Decimal] = Field(default=None) + 
internal_transaction_out_value: Optional[Decimal] = Field(default=None) + internal_transaction_self_value: Optional[Decimal] = Field(default=None) + + erc20_transfer_in_count: Optional[int] = Field(default=None) + erc20_transfer_out_count: Optional[int] = Field(default=None) + erc20_transfer_self_count: Optional[int] = Field(default=None) + + nft_transfer_in_count: Optional[int] = Field(default=None) + nft_transfer_out_count: Optional[int] = Field(default=None) + nft_transfer_self_count: Optional[int] = Field(default=None) + + nft_721_transfer_in_count: Optional[int] = Field(default=None) + nft_721_transfer_out_count: Optional[int] = Field(default=None) + nft_721_transfer_self_count: Optional[int] = Field(default=None) + + nft_1155_transfer_in_count: Optional[int] = Field(default=None) + nft_1155_transfer_out_count: Optional[int] = Field(default=None) + nft_1155_transfer_self_count: Optional[int] = Field(default=None) + + contract_creation_count: Optional[int] = Field(default=None) + contract_destruction_count: Optional[int] = Field(default=None) + contract_operation_count: Optional[int] = Field(default=None) + + transaction_count: Optional[int] = Field(default=None) + internal_transaction_count: Optional[int] = Field(default=None) + erc20_transfer_count: Optional[int] = Field(default=None) + + nft_transfer_count: Optional[int] = Field(default=None) + nft_721_transfer_count: Optional[int] = Field(default=None) + nft_1155_transfer_count: Optional[int] = Field(default=None) diff --git a/hemera/common/models/address/stats/token_address_index.py b/hemera/common/models/address/stats/token_address_index.py new file mode 100644 index 000000000..6ffa9d979 --- /dev/null +++ b/hemera/common/models/address/stats/token_address_index.py @@ -0,0 +1,15 @@ +from datetime import datetime + +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class TokenAddressIndexStats(HemeraModel, table=True): + __tablename__ = "af_index_token_address_stats" + + 
address: bytes = Field(primary_key=True) + token_holder_count: int = Field() + token_transfer_count: int = Field() + + update_time: datetime = Field(default_factory=datetime.utcnow) diff --git a/hemera/common/models/address/stats/token_address_index_daily_stats.py b/hemera/common/models/address/stats/token_address_index_daily_stats.py new file mode 100644 index 000000000..453a093ec --- /dev/null +++ b/hemera/common/models/address/stats/token_address_index_daily_stats.py @@ -0,0 +1,15 @@ +from datetime import datetime + +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class TokenAddressIndexDailyStats(HemeraModel, table=True): + __tablename__ = "af_index_token_address_daily_stats" + + address: bytes = Field(primary_key=True) + token_holder_count: int = Field() + token_transfer_count: int = Field() + + update_time: datetime = Field(default_factory=datetime.utcnow) diff --git a/hemera/common/models/address/token_address_nft_inventories.py b/hemera/common/models/address/token_address_nft_inventories.py new file mode 100644 index 000000000..561ca43e2 --- /dev/null +++ b/hemera/common/models/address/token_address_nft_inventories.py @@ -0,0 +1,37 @@ +from datetime import datetime +from decimal import Decimal + +from sqlalchemy import Index +from sqlalchemy.sql import text +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera_udf.address_index.domains import TokenAddressNftInventory + + +class TokenAddressNftInventories(HemeraModel, table=True): + __tablename__ = "token_address_nft_inventories" + + token_address: bytes = Field(primary_key=True) + token_id: Decimal = Field(primary_key=True) + wallet_address: bytes = Field() + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + + __table_args__ = ( + Index( + "token_address_nft_inventories_wallet_address_token_address__idx", + text("wallet_address, token_address, 
token_id"), + ), + ) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": TokenAddressNftInventory, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] diff --git a/hemera/common/models/base/__init__.py b/hemera/common/models/base/__init__.py new file mode 100644 index 000000000..67bd8d27d --- /dev/null +++ b/hemera/common/models/base/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/3/4 11:49 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/common/models/block_timestamp_mapper.py b/hemera/common/models/base/block_timestamp_mapper.py similarity index 52% rename from hemera/common/models/block_timestamp_mapper.py rename to hemera/common/models/base/block_timestamp_mapper.py index 1974eaf58..6bc21f39e 100644 --- a/hemera/common/models/block_timestamp_mapper.py +++ b/hemera/common/models/base/block_timestamp_mapper.py @@ -1,15 +1,19 @@ -from sqlalchemy import Column, Index, desc -from sqlalchemy.dialects.postgresql import BIGINT, TIMESTAMP +from datetime import datetime +from typing import Optional + +from sqlalchemy import Index, text +from sqlmodel import Field from hemera.common.models import HemeraModel, general_converter from hemera.indexer.domains.block_ts_mapper import BlockTsMapper -class BlockTimestampMapper(HemeraModel): +class BlockTimestampMapper(HemeraModel, table=True): __tablename__ = "block_ts_mapper" - ts = Column(BIGINT, primary_key=True) - block_number = Column(BIGINT) - timestamp = Column(TIMESTAMP) + + ts: int = Field(primary_key=True) + block_number: Optional[int] = Field(default=None) + timestamp: Optional[datetime] = Field(default=None) @staticmethod def model_domain_mapping(): @@ -22,5 +26,4 @@ def model_domain_mapping(): } ] - -Index("block_ts_mapper_idx", desc(BlockTimestampMapper.block_number)) + __table_args__ = (Index("block_ts_mapper_block_number_idx", text("block_number DESC")),) diff --git 
a/hemera/common/models/base/blocks.py b/hemera/common/models/base/blocks.py new file mode 100644 index 000000000..cc8f803f7 --- /dev/null +++ b/hemera/common/models/base/blocks.py @@ -0,0 +1,87 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional, Type, Union + +from sqlalchemy.sql import text +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.block import Block, UpdateBlockInternalCount + + +class Blocks(HemeraModel, table=True): + __tablename__ = "blocks" + + # Primary key and basic fields + hash: bytes = Field(primary_key=True) + number: Optional[int] = Field(default=None) + timestamp: Optional[datetime] = Field(default=None) + parent_hash: Optional[bytes] = Field(default=None) + nonce: Optional[bytes] = Field(default=None) + + # Gas related fields + gas_limit: Optional[Decimal] = Field(default=None, max_digits=100) + gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + base_fee_per_gas: Optional[Decimal] = Field(default=None, max_digits=100) + blob_gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + excess_blob_gas: Optional[Decimal] = Field(default=None, max_digits=100) + + # Blockchain specific fields + difficulty: Optional[Decimal] = Field(default=None, max_digits=38) + total_difficulty: Optional[Decimal] = Field(default=None, max_digits=38) + size: Optional[int] = Field(default=None) + miner: Optional[bytes] = Field(default=None) + sha3_uncles: Optional[bytes] = Field(default=None) + transactions_root: Optional[bytes] = Field(default=None) + transactions_count: Optional[int] = Field(default=None) + traces_count: Optional[int] = Field(default=0) + internal_transactions_count: Optional[int] = Field(default=0) + + # Root fields + state_root: Optional[bytes] = Field(default=None) + receipts_root: Optional[bytes] = Field(default=None) + withdrawals_root: Optional[bytes] = Field(default=None) + extra_data: 
Optional[bytes] = Field(default=None) + + # Metadata fields + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + reorg: Optional[bool] = Field(default=False) + + __query_order__ = [number] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": Block, + "conflict_do_update": True, + "update_strategy": None, + "converter": converter, + }, + { + "domain": UpdateBlockInternalCount, + "conflict_do_update": True, + "update_strategy": None, + "converter": converter, + }, + ] + + __table_args__ = ( + Index("blocks_timestamp_index", text("timestamp DESC")), + Index("blocks_number_index", text("number DESC")), + Index("blocks_number_unique_when_not_reorg", "number", unique=True, postgresql_where=text("reorg = false")), + Index("blocks_hash_unique_when_not_reorg", "hash", unique=True, postgresql_where=text("reorg = false")), + ) + + +def converter( + table: Type[HemeraModel], + data: Union[Block, UpdateBlockInternalCount], + is_update=False, +): + converted_data = general_converter(table, data, is_update) + if isinstance(data, Block): + converted_data["transactions_count"] = len(data.transactions) if data.transactions else 0 + + return converted_data diff --git a/hemera/common/models/base/logs.py b/hemera/common/models/base/logs.py new file mode 100644 index 000000000..9e480236c --- /dev/null +++ b/hemera/common/models/base/logs.py @@ -0,0 +1,63 @@ +from datetime import datetime +from typing import Optional + +from sqlalchemy import Column, desc, func, text +from sqlalchemy.dialects.postgresql import BOOLEAN, BYTEA, TIMESTAMP +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.log import Log + + +class Logs(HemeraModel, table=True): + __tablename__ = "logs" + + # Primary keys + transaction_hash: bytes = Field(primary_key=True) + log_index: int = Field(primary_key=True) + block_hash: bytes = 
Field(primary_key=True) + block_number: Optional[int] = Field(default=None) + block_timestamp: Optional[datetime] = Field(primary_key=True) + + # Log data + address: Optional[bytes] = Field(default=None) + data: Optional[bytes] = Field(default=None) + topic0: Optional[bytes] = Field(default=None) + topic1: Optional[bytes] = Field(default=None) + topic2: Optional[bytes] = Field(default=None) + topic3: Optional[bytes] = Field(default=None) + + # Block info + transaction_index: Optional[int] = Field(default=None) + + # Metadata + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + reorg: bool = Field(default=False, sa_column=Column(BOOLEAN, server_default=text("false"))) + + __query_order__ = [block_number, log_index] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": Log, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + # Address with block number index + Index("logs_address_block_number_log_index_index", text("address, block_number DESC, log_index DESC")), + # Address with topic index + Index( + "logs_address_topic_0_number_log_index_index", + text("address, topic0, block_number DESC, log_index DESC"), + ), + ) diff --git a/hemera/common/models/base/transactions.py b/hemera/common/models/base/transactions.py new file mode 100644 index 000000000..fd783d38b --- /dev/null +++ b/hemera/common/models/base/transactions.py @@ -0,0 +1,108 @@ +import os +from datetime import datetime +from decimal import Decimal +from typing import List, Optional, Type + +from sqlalchemy import Column, Computed, text +from sqlalchemy.dialects.postgresql import ARRAY, BYTEA, VARCHAR +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from 
hemera.common.utils.format_utils import hex_str_to_bytes +from hemera.indexer.domains.transaction import Transaction + + +class Transactions(HemeraModel, table=True): + __tablename__ = "transactions" + + # Primary key and transaction basic fields + hash: bytes = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + transaction_index: Optional[int] = Field(default=None) + from_address: Optional[bytes] = Field(default=None) + to_address: Optional[bytes] = Field(default=None) + value: Optional[Decimal] = Field(default=None, max_digits=100) + transaction_type: Optional[int] = Field(default=None) + input: Optional[bytes] = Field(default=None, sa_column=Column(BYTEA)) + method_id: Optional[str] = Field( + default=None, sa_column=Column(VARCHAR, Computed("substring((input)::varchar for 8)::bigint::varchar")) + ) + nonce: Optional[int] = Field(default=None) + + # Block related fields + block_hash: Optional[bytes] = Field(default=None) + block_number: Optional[int] = Field(default=None) + + # Gas related fields + gas: Optional[Decimal] = Field(default=None, max_digits=100) + gas_price: Optional[Decimal] = Field(default=None, max_digits=100) + max_fee_per_gas: Optional[Decimal] = Field(default=None, max_digits=100) + max_priority_fee_per_gas: Optional[Decimal] = Field(default=None, max_digits=100) + + # Receipt fields + receipt_root: Optional[bytes] = Field(default=None) + receipt_status: Optional[int] = Field(default=None) + receipt_gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_cumulative_gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_effective_gas_price: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_l1_fee: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_l1_fee_scalar: Optional[Decimal] = Field(default=None, max_digits=100, decimal_places=18) + receipt_l1_gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_l1_gas_price: 
Optional[Decimal] = Field(default=None, max_digits=100) + receipt_blob_gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + receipt_blob_gas_price: Optional[Decimal] = Field(default=None, max_digits=100) + + # Blob fields + blob_versioned_hashes: Optional[List[bytes]] = Field(default=None, sa_column=Column(ARRAY(BYTEA))) + receipt_contract_address: Optional[bytes] = Field(default=None) + + # Error fields + exist_error: Optional[bool] = Field(default=None) + error: Optional[str] = Field(default=None) + revert_reason: Optional[str] = Field(default=None) + + # Metadata fields + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + reorg: Optional[bool] = Field(default=False) + + __query_order__ = [block_number, transaction_index] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": Transaction, + "conflict_do_update": False, + "update_strategy": None, + "converter": converter, + } + ] + + __table_args__ = ( + Index("transactions_block_timestamp_index", text("block_timestamp DESC")), + Index("transactions_block_number_transaction_index", text("block_number DESC,transaction_index DESC")), + ) + + +def converter(table: Type[HemeraModel], data: Transaction, is_update=False): + converted_data = general_converter(table, data, is_update) + receipt = data.receipt + + converted_data["receipt_root"] = hex_str_to_bytes(receipt.root) if receipt and receipt.root else None + converted_data["receipt_status"] = receipt.status if receipt else None + converted_data["receipt_gas_used"] = receipt.gas_used if receipt else None + converted_data["receipt_cumulative_gas_used"] = receipt.cumulative_gas_used if receipt else None + converted_data["receipt_effective_gas_price"] = receipt.effective_gas_price if receipt else None + converted_data["receipt_l1_fee"] = receipt.l1_fee if receipt else None + converted_data["receipt_l1_fee_scalar"] = receipt.l1_fee_scalar 
if receipt else None + converted_data["receipt_l1_gas_used"] = receipt.l1_gas_used if receipt else None + converted_data["receipt_l1_gas_price"] = receipt.l1_gas_price if receipt else None + converted_data["receipt_blob_gas_used"] = receipt.blob_gas_used if receipt else None + converted_data["receipt_blob_gas_price"] = receipt.blob_gas_price if receipt else None + converted_data["receipt_contract_address"] = ( + hex_str_to_bytes(receipt.contract_address) if receipt and receipt.contract_address else None + ) + + return converted_data diff --git a/hemera/common/models/blocks.py b/hemera/common/models/blocks.py deleted file mode 100644 index 1fa753233..000000000 --- a/hemera/common/models/blocks.py +++ /dev/null @@ -1,89 +0,0 @@ -from typing import Type, Union - -from sqlalchemy import Column, Index, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.block import Block, UpdateBlockInternalCount - - -class Blocks(HemeraModel): - __tablename__ = "blocks" - hash = Column(BYTEA, primary_key=True) - number = Column(BIGINT) - timestamp = Column(TIMESTAMP) - parent_hash = Column(BYTEA) - nonce = Column(BYTEA) - - gas_limit = Column(NUMERIC(100)) - gas_used = Column(NUMERIC(100)) - base_fee_per_gas = Column(NUMERIC(100)) - blob_gas_used = Column(NUMERIC(100)) - excess_blob_gas = Column(NUMERIC(100)) - - # for pow,pos - difficulty = Column(NUMERIC(38)) - total_difficulty = Column(NUMERIC(38)) - size = Column(BIGINT) - miner = Column(BYTEA) - sha3_uncles = Column(BYTEA) - transactions_root = Column(BYTEA) - transactions_count = Column(BIGINT) - traces_count = Column(BIGINT, default=0) - internal_transactions_count = Column(BIGINT, default=0) - - state_root = Column(BYTEA) - receipts_root = Column(BYTEA) - withdrawals_root = Column(BYTEA) - extra_data = Column(BYTEA) - - create_time = Column(TIMESTAMP, server_default=func.now()) - 
update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __query_order__ = [number] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": Block, - "conflict_do_update": True, - "update_strategy": None, - "converter": converter, - }, - { - "domain": UpdateBlockInternalCount, - "conflict_do_update": True, - "update_strategy": None, - "converter": converter, - }, - ] - - -Index("blocks_timestamp_index", desc(Blocks.timestamp)) -Index("blocks_number_index", desc(Blocks.number)) -Index( - "blocks_number_unique_when_not_reorg", - Blocks.number, - unique=True, - postgresql_where=(Blocks.reorg == False), -) -Index( - "blocks_hash_unique_when_not_reorg", - Blocks.hash, - unique=True, - postgresql_where=(Blocks.reorg == False), -) - - -def converter( - table: Type[HemeraModel], - data: Union[Block, UpdateBlockInternalCount], - is_update=False, -): - converted_data = general_converter(table, data, is_update) - if isinstance(data, Block): - converted_data["transactions_count"] = len(data.transactions) if data.transactions else 0 - - return converted_data diff --git a/hemera/common/models/coin_balances.py b/hemera/common/models/coin_balances.py deleted file mode 100644 index 80fba580a..000000000 --- a/hemera/common/models/coin_balances.py +++ /dev/null @@ -1,38 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.coin_balance import CoinBalance - - -class CoinBalances(HemeraModel): - __tablename__ = "address_coin_balances" - - address = Column(BYTEA, primary_key=True) - balance = Column(NUMERIC(100)) - block_number = Column(BIGINT, primary_key=True) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, 
server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("address", "block_number"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": CoinBalance, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "coin_balance_address_number_desc_index", - desc(CoinBalances.address), - desc(CoinBalances.block_number), -) diff --git a/hemera/common/models/contract_internal_transactions.py b/hemera/common/models/contract_internal_transactions.py deleted file mode 100644 index 8199c38ed..000000000 --- a/hemera/common/models/contract_internal_transactions.py +++ /dev/null @@ -1,70 +0,0 @@ -from sqlalchemy import Column, Index, desc, func, text -from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction - - -class ContractInternalTransactions(HemeraModel): - __tablename__ = "contract_internal_transactions" - - trace_id = Column(VARCHAR, primary_key=True) - from_address = Column(BYTEA) - to_address = Column(BYTEA) - value = Column(NUMERIC(100)) - trace_type = Column(VARCHAR) - call_type = Column(VARCHAR) - gas = Column(NUMERIC(100)) - gas_used = Column(NUMERIC(100)) - trace_address = Column(ARRAY(INTEGER)) - error = Column(TEXT) - status = Column(INTEGER) - block_number = Column(BIGINT) - block_hash = Column(BYTEA) - block_timestamp = Column(TIMESTAMP) - transaction_index = Column(INTEGER) - transaction_hash = Column(BYTEA) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __query_order__ = [block_number, transaction_index] - - @staticmethod - def model_domain_mapping(): - return [ - 
{ - "domain": ContractInternalTransaction, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "contract_internal_transactions_transaction_hash_idx", - ContractInternalTransactions.transaction_hash, -) -Index( - "internal_transactions_block_number_index", - desc(ContractInternalTransactions.block_number), -) -Index( - "internal_transactions_number_transaction_index", - desc(ContractInternalTransactions.block_number), - desc(ContractInternalTransactions.transaction_index), -) -Index( - "internal_transactions_from_address_number_transaction_index", - ContractInternalTransactions.from_address, - desc(ContractInternalTransactions.block_number), - desc(ContractInternalTransactions.transaction_index), -) -Index( - "internal_transactions_to_address_number_transaction_index", - ContractInternalTransactions.to_address, - desc(ContractInternalTransactions.block_number), - desc(ContractInternalTransactions.transaction_index), -) diff --git a/hemera/common/models/contracts.py b/hemera/common/models/contracts.py deleted file mode 100644 index 622119756..000000000 --- a/hemera/common/models/contracts.py +++ /dev/null @@ -1,58 +0,0 @@ -from sqlalchemy import Column, Computed, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, JSONB, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.contract import Contract, ContractFromTransaction - - -class Contracts(HemeraModel): - __tablename__ = "contracts" - - address = Column(BYTEA, primary_key=True) - name = Column(VARCHAR) - contract_creator = Column(BYTEA) - creation_code = Column(BYTEA) - deployed_code = Column(BYTEA) - - block_number = Column(BIGINT) - block_hash = Column(BYTEA) - block_timestamp = Column(TIMESTAMP) - transaction_index = Column(INTEGER) - transaction_hash = Column(BYTEA) - transaction_from_address = Column(BYTEA) - - official_website = Column(VARCHAR) - 
description = Column(VARCHAR) - email = Column(VARCHAR) - social_list = Column(JSONB) - is_verified = Column(BOOLEAN, default=False) - is_proxy = Column(BOOLEAN) - implementation_contract = Column(BYTEA) - verified_implementation_contract = Column(BYTEA) - proxy_standard = Column(VARCHAR) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - deployed_code_hash = Column( - VARCHAR, - Computed("encode(digest('0x'||encode(deployed_code, 'hex'), 'sha256'), 'hex')"), - ) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": Contract, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - }, - { - "domain": ContractFromTransaction, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - }, - ] diff --git a/hemera/common/models/current_token_balances.py b/hemera/common/models/current_token_balances.py deleted file mode 100644 index 61ddc5b27..000000000 --- a/hemera/common/models/current_token_balances.py +++ /dev/null @@ -1,49 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel -from hemera.common.models.token_balances import token_balances_general_converter -from hemera.indexer.domains.current_token_balance import CurrentTokenBalance - - -class CurrentTokenBalances(HemeraModel): - __tablename__ = "address_current_token_balances" - - address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(78), primary_key=True) - token_type = Column(VARCHAR) - token_address = Column(BYTEA, primary_key=True) - balance = Column(NUMERIC(100)) - - block_number = Column(BIGINT) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - 
update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("address", "token_address", "token_id"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": CurrentTokenBalance, - "conflict_do_update": True, - "update_strategy": "EXCLUDED.block_number > address_current_token_balances.block_number", - "converter": token_balances_general_converter, - } - ] - - -Index( - "current_token_balances_token_address_balance_of_index", - CurrentTokenBalances.token_address, - desc(CurrentTokenBalances.balance), -) -Index( - "current_token_balances_token_address_id_balance_of_index", - CurrentTokenBalances.token_address, - CurrentTokenBalances.token_id, - desc(CurrentTokenBalances.balance), -) diff --git a/hemera/common/models/erc1155_token_id_details.py b/hemera/common/models/erc1155_token_id_details.py deleted file mode 100644 index 978b486f8..000000000 --- a/hemera/common/models/erc1155_token_id_details.py +++ /dev/null @@ -1,49 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, JSONB, NUMERIC, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter -from hemera.common.models.erc721_token_id_details import token_uri_format_converter -from hemera.indexer.domains.token_id_infos import ERC1155TokenIdDetail, UpdateERC1155TokenIdDetail - - -class ERC1155TokenIdDetails(HemeraModel): - __tablename__ = "erc1155_token_id_details" - - token_address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(100), primary_key=True) - token_supply = Column(NUMERIC(78)) - token_uri = Column(VARCHAR) - token_uri_info = Column(JSONB) - - block_number = Column(BIGINT) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = 
Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("token_address", "token_id"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": ERC1155TokenIdDetail, - "conflict_do_update": False, - "update_strategy": None, - "converter": token_uri_format_converter, - }, - { - "domain": UpdateERC1155TokenIdDetail, - "conflict_do_update": True, - "update_strategy": "EXCLUDED.block_number >= erc1155_token_id_details.block_number", - "converter": general_converter, - }, - ] - - -Index( - "erc1155_detail_desc_address_id_index", - desc(ERC1155TokenIdDetails.token_address), - ERC1155TokenIdDetails.token_id, -) diff --git a/hemera/common/models/erc1155_token_transfers.py b/hemera/common/models/erc1155_token_transfers.py deleted file mode 100644 index f725ed51d..000000000 --- a/hemera/common/models/erc1155_token_transfers.py +++ /dev/null @@ -1,80 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.token_transfer import ERC1155TokenTransfer - - -class ERC1155TokenTransfers(HemeraModel): - __tablename__ = "erc1155_token_transfers" - - transaction_hash = Column(BYTEA, primary_key=True) - log_index = Column(INTEGER, primary_key=True) - from_address = Column(BYTEA) - to_address = Column(BYTEA) - token_address = Column(BYTEA) - token_id = Column(NUMERIC(100), primary_key=True) - value = Column(NUMERIC(100)) - - block_number = Column(BIGINT) - block_hash = Column(BYTEA, primary_key=True) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index", "token_id"),) - 
__query_order__ = [block_number, log_index] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": ERC1155TokenTransfer, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "erc1155_token_transfers_number_log_index", - desc(ERC1155TokenTransfers.block_number), - desc(ERC1155TokenTransfers.log_index), -) - -Index( - "erc1155_token_transfers_from_address_number_log_index_index", - ERC1155TokenTransfers.from_address, - desc(ERC1155TokenTransfers.block_number), - desc(ERC1155TokenTransfers.log_index), -) -Index( - "erc1155_token_transfers_to_address_number_log_index_index", - ERC1155TokenTransfers.to_address, - desc(ERC1155TokenTransfers.block_number), - desc(ERC1155TokenTransfers.log_index), -) -Index( - "erc1155_token_transfers_token_address_number_log_index_index", - ERC1155TokenTransfers.token_address, - desc(ERC1155TokenTransfers.block_number), - desc(ERC1155TokenTransfers.log_index), -) -Index( - "erc1155_token_transfers_token_address_id_index", - ERC1155TokenTransfers.token_address, - ERC1155TokenTransfers.token_id, -) -Index( - "erc1155_token_transfers_token_address_from_index", - ERC1155TokenTransfers.token_address, - ERC1155TokenTransfers.from_address, -) -Index( - "erc1155_token_transfers_token_address_to_index", - ERC1155TokenTransfers.token_address, - ERC1155TokenTransfers.to_address, -) diff --git a/hemera/common/models/erc20_token_transfers.py b/hemera/common/models/erc20_token_transfers.py deleted file mode 100644 index b2dcaf9ef..000000000 --- a/hemera/common/models/erc20_token_transfers.py +++ /dev/null @@ -1,74 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.token_transfer import ERC20TokenTransfer - - -class ERC20TokenTransfers(HemeraModel): - 
__tablename__ = "erc20_token_transfers" - - transaction_hash = Column(BYTEA, primary_key=True) - log_index = Column(INTEGER, primary_key=True) - from_address = Column(BYTEA) - to_address = Column(BYTEA) - token_address = Column(BYTEA) - value = Column(NUMERIC(100)) - - block_number = Column(BIGINT) - block_hash = Column(BYTEA, primary_key=True) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index"),) - __query_order__ = [block_number, log_index] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": ERC20TokenTransfer, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "erc20_token_transfers_number_log_index", - desc(ERC20TokenTransfers.block_number), - desc(ERC20TokenTransfers.log_index), -) - -Index( - "erc20_token_transfers_from_address_number_log_index_index", - ERC20TokenTransfers.from_address, - desc(ERC20TokenTransfers.block_number), - desc(ERC20TokenTransfers.log_index), -) -Index( - "erc20_token_transfers_to_address_number_log_index_index", - ERC20TokenTransfers.to_address, - desc(ERC20TokenTransfers.block_number), - desc(ERC20TokenTransfers.log_index), -) -Index( - "erc20_token_transfers_token_address_number_log_index_index", - ERC20TokenTransfers.token_address, - desc(ERC20TokenTransfers.block_number), - desc(ERC20TokenTransfers.log_index), -) -Index( - "erc20_token_transfers_token_address_from_index_index", - ERC20TokenTransfers.token_address, - ERC20TokenTransfers.from_address, -) -Index( - "erc20_token_transfers_token_address_to_index_index", - ERC20TokenTransfers.token_address, - ERC20TokenTransfers.to_address, -) diff --git a/hemera/common/models/erc721_token_id_changes.py 
b/hemera/common/models/erc721_token_id_changes.py deleted file mode 100644 index f8dc0ab92..000000000 --- a/hemera/common/models/erc721_token_id_changes.py +++ /dev/null @@ -1,41 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.token_id_infos import ERC721TokenIdChange - - -class ERC721TokenIdChanges(HemeraModel): - __tablename__ = "erc721_token_id_changes" - - token_address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(100), primary_key=True) - token_owner = Column(BYTEA) - - block_number = Column(BIGINT, primary_key=True) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("token_address", "token_id", "block_number"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": ERC721TokenIdChange, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "erc721_change_address_id_number_desc_index", - ERC721TokenIdChanges.token_address, - ERC721TokenIdChanges.token_id, - desc(ERC721TokenIdChanges.block_number), -) diff --git a/hemera/common/models/erc721_token_id_details.py b/hemera/common/models/erc721_token_id_details.py deleted file mode 100644 index d3e44a16f..000000000 --- a/hemera/common/models/erc721_token_id_details.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Type -from urllib import parse - -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, JSONB, NUMERIC, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter 
-from hemera.indexer.domains.token_id_infos import ERC721TokenIdDetail, UpdateERC721TokenIdDetail - - -def token_uri_format_converter(table: Type[HemeraModel], data, is_update=False): - - if data.token_uri is not None: - data.token_uri = parse.quote_plus(data.token_uri) - - return general_converter(table, data, is_update) - - -class ERC721TokenIdDetails(HemeraModel): - __tablename__ = "erc721_token_id_details" - - token_address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(100), primary_key=True) - token_owner = Column(BYTEA) - token_uri = Column(VARCHAR) - token_uri_info = Column(JSONB) - - block_number = Column(BIGINT) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("token_address", "token_id"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": ERC721TokenIdDetail, - "conflict_do_update": False, - "update_strategy": None, - "converter": token_uri_format_converter, - }, - { - "domain": UpdateERC721TokenIdDetail, - "conflict_do_update": True, - "update_strategy": "EXCLUDED.block_number >= erc721_token_id_details.block_number", - "converter": general_converter, - }, - ] - - -Index( - "erc721_detail_owner_address_id_index", - desc(ERC721TokenIdDetails.token_owner), - ERC721TokenIdDetails.token_address, - ERC721TokenIdDetails.token_id, -) diff --git a/hemera/common/models/erc721_token_transfers.py b/hemera/common/models/erc721_token_transfers.py deleted file mode 100644 index 6b810ff24..000000000 --- a/hemera/common/models/erc721_token_transfers.py +++ /dev/null @@ -1,83 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter 
-from hemera.indexer.domains.token_transfer import ERC721TokenTransfer - - -class ERC721TokenTransfers(HemeraModel): - __tablename__ = "erc721_token_transfers" - - transaction_hash = Column(BYTEA, primary_key=True) - log_index = Column(INTEGER, primary_key=True) - from_address = Column(BYTEA) - to_address = Column(BYTEA) - token_address = Column(BYTEA) - token_id = Column(NUMERIC(100)) - - block_number = Column(BIGINT) - block_hash = Column(BYTEA, primary_key=True) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index"),) - __query_order__ = [block_number, log_index] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": ERC721TokenTransfer, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "erc721_token_transfers_block_timestamp_index", - desc(ERC721TokenTransfers.block_timestamp), -) -Index( - "erc721_token_transfers_number_log_index", - desc(ERC721TokenTransfers.block_number), - desc(ERC721TokenTransfers.log_index), -) - -Index( - "erc721_token_transfers_from_address_number_log_index_index", - ERC721TokenTransfers.from_address, - desc(ERC721TokenTransfers.block_number), - desc(ERC721TokenTransfers.log_index), -) -Index( - "erc721_token_transfers_to_address_number_log_index_index", - ERC721TokenTransfers.to_address, - desc(ERC721TokenTransfers.block_number), - desc(ERC721TokenTransfers.log_index), -) -Index( - "erc721_token_transfers_token_address_number_log_index_index", - ERC721TokenTransfers.token_address, - desc(ERC721TokenTransfers.block_number), - desc(ERC721TokenTransfers.log_index), -) -Index( - "erc721_token_transfers_token_address_id_index", - ERC721TokenTransfers.token_address, - ERC721TokenTransfers.token_id, -) -Index( - 
"erc721_token_transfers_token_address_from_index", - ERC721TokenTransfers.token_address, - ERC721TokenTransfers.from_address, -) -Index( - "erc721_token_transfers_token_address_to_index", - ERC721TokenTransfers.token_address, - ERC721TokenTransfers.to_address, -) diff --git a/hemera/common/models/exception_records.py b/hemera/common/models/exception_records.py deleted file mode 100644 index 8ac2b2a8e..000000000 --- a/hemera/common/models/exception_records.py +++ /dev/null @@ -1,20 +0,0 @@ -from datetime import datetime - -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, JSONB, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel - - -class ExceptionRecords(HemeraModel): - __tablename__ = "exception_records" - - id = Column(BIGINT, primary_key=True, autoincrement=True) - block_number = Column(BIGINT) - dataclass = Column(VARCHAR) - level = Column(VARCHAR) - message_type = Column(VARCHAR) - message = Column(VARCHAR) - exception_env = Column(JSONB) - - record_time = Column(TIMESTAMP, default=datetime.utcnow) diff --git a/hemera/common/models/failure_records.py b/hemera/common/models/failure_records.py deleted file mode 100644 index 4c64497f2..000000000 --- a/hemera/common/models/failure_records.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, JSON, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel - - -class FailureRecords(HemeraModel): - __tablename__ = "failure_records" - record_id = Column(BIGINT, primary_key=True, autoincrement=True) - mission_sign = Column(VARCHAR) - output_types = Column(VARCHAR) - start_block_number = Column(BIGINT) - end_block_number = Column(BIGINT) - exception_stage = Column(VARCHAR) - exception = Column(JSON) - crash_time = Column(TIMESTAMP) diff --git a/hemera/common/models/fix_record.py b/hemera/common/models/fix_record.py deleted file mode 100644 index 585dabfd2..000000000 --- 
a/hemera/common/models/fix_record.py +++ /dev/null @@ -1,15 +0,0 @@ -from sqlalchemy import Column, func -from sqlalchemy.dialects.postgresql import BIGINT, INTEGER, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel - - -class FixRecord(HemeraModel): - __tablename__ = "fix_record" - job_id = Column(INTEGER, primary_key=True) - start_block_number = Column(BIGINT) - last_fixed_block_number = Column(BIGINT) - remain_process = Column(INTEGER) - job_status = Column(VARCHAR) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP) diff --git a/hemera/common/models/logs.py b/hemera/common/models/logs.py deleted file mode 100644 index 17b475de7..000000000 --- a/hemera/common/models/logs.py +++ /dev/null @@ -1,57 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.log import Log - - -class Logs(HemeraModel): - __tablename__ = "logs" - - log_index = Column(INTEGER, primary_key=True) - address = Column(BYTEA) - data = Column(BYTEA) - topic0 = Column(BYTEA) - topic1 = Column(BYTEA) - topic2 = Column(BYTEA) - topic3 = Column(BYTEA) - transaction_hash = Column(BYTEA, primary_key=True) - transaction_index = Column(INTEGER) - block_number = Column(BIGINT) - block_hash = Column(BYTEA, primary_key=True) - block_timestamp = Column(TIMESTAMP) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __table_args__ = (PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index"),) - __query_order__ = [block_number, log_index] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": Log, - "conflict_do_update": False, - "update_strategy": None, - "converter": 
general_converter, - } - ] - - -Index("logs_block_timestamp_index", desc(Logs.block_timestamp)) -Index( - "logs_address_block_number_log_index_index", - Logs.address, - desc(Logs.block_number), - desc(Logs.log_index), -) -Index("logs_block_number_log_index_index", desc(Logs.block_number), desc(Logs.log_index)) -Index( - "logs_address_topic_0_number_log_index_index", - Logs.address, - Logs.topic0, - desc(Logs.block_number), - desc(Logs.log_index), -) diff --git a/hemera/common/models/metrics_persistence.py b/hemera/common/models/metrics_persistence.py deleted file mode 100644 index 2f591bf95..000000000 --- a/hemera/common/models/metrics_persistence.py +++ /dev/null @@ -1,14 +0,0 @@ -from sqlalchemy import Column, func -from sqlalchemy.dialects.postgresql import JSON, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel - - -class MetricsPersistence(HemeraModel): - __tablename__ = "metrics_persistence" - - instance = Column(VARCHAR, primary_key=True) - metrics = Column(JSON) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) diff --git a/hemera/common/models/nft_transfers.py b/hemera/common/models/nft_transfers.py deleted file mode 100644 index 9b8f5bb5a..000000000 --- a/hemera/common/models/nft_transfers.py +++ /dev/null @@ -1,101 +0,0 @@ -from sqlalchemy import ( - BigInteger, - Boolean, - Column, - DateTime, - Index, - Integer, - LargeBinary, - Numeric, - PrimaryKeyConstraint, - desc, - func, - text, -) - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.token_transfer import ERC721TokenTransfer, ERC1155TokenTransfer - - -class NftTransfers(HemeraModel): - """ - Model for tracking nft(ERC721/ERC1155) transfer events. 
- """ - - __tablename__ = "nft_transfers" - - # Primary columns - transaction_hash = Column(LargeBinary, nullable=False) - block_hash = Column(LargeBinary, nullable=False) - log_index = Column(Integer, nullable=False) - token_id = Column(Numeric(100), nullable=False) - - # Transfer info - from_address = Column(LargeBinary) - to_address = Column(LargeBinary) - token_address = Column(LargeBinary) - value = Column(Numeric(100), nullable=True) - - # Block info - block_number = Column(BigInteger) - block_timestamp = Column(DateTime) - - # Metadata columns - create_time = Column(DateTime, server_default=func.now(), nullable=False) - update_time = Column(DateTime, server_default=func.now(), onupdate=func.now(), nullable=False) - reorg = Column(Boolean, server_default=text("false"), nullable=False) - - # Table constraints - __table_args__ = ( - PrimaryKeyConstraint( - "transaction_hash", - "block_timestamp", - "block_number", - "log_index", - "block_hash", - "token_id", - name="pk_nft_transfers", - ), - # Block-based indices - Index("idx_nft_transfers_block_log", desc(block_timestamp), desc(block_number), desc(log_index)), - # Address-based indices with time - Index( - "idx_nft_transfers_token_time", - token_address, - desc(block_timestamp), - desc(block_number), - desc(log_index), - ), - # Token-specific indices - Index( - "idx_nft_transfers_token_id", - token_address, - token_id, - desc(block_timestamp), - desc(block_number), - desc(log_index), - ), - ) - - # Query order specification - __query_order__ = [block_timestamp, block_number, log_index] - - @staticmethod - def model_domain_mapping(): - """ - Define the domain model mapping configuration. 
- """ - return [ - { - "domain": ERC1155TokenTransfer, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - }, - { - "domain": ERC721TokenTransfer, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - }, - ] diff --git a/hemera/common/models/period_wallet_addresses_aggregates.py b/hemera/common/models/period_wallet_addresses_aggregates.py deleted file mode 100644 index 9a5d8eb66..000000000 --- a/hemera/common/models/period_wallet_addresses_aggregates.py +++ /dev/null @@ -1,51 +0,0 @@ -from sqlalchemy import DATE, Column, Computed -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC - -from hemera.common.models import HemeraModel - - -class PeriodWalletAddressesAggregates(HemeraModel): - __tablename__ = "period_wallet_addresses_aggregates" - - address = Column(BYTEA, primary_key=True, nullable=False) - period_date = Column(DATE, primary_key=True, nullable=False) - txn_in_cnt = Column(INTEGER, default=0) - txn_out_cnt = Column(INTEGER, default=0) - txn_in_value = Column(NUMERIC(78)) - txn_out_value = Column(NUMERIC(78)) - internal_txn_in_cnt = Column(INTEGER, default=0) - internal_txn_out_cnt = Column(INTEGER, default=0) - internal_txn_in_value = Column(NUMERIC(78)) - internal_txn_out_value = Column(NUMERIC(78)) - erc20_transfer_in_cnt = Column(INTEGER, default=0) - erc721_transfer_in_cnt = Column(INTEGER, default=0) - erc1155_transfer_in_cnt = Column(INTEGER, default=0) - erc20_transfer_out_cnt = Column(INTEGER, default=0) - erc721_transfer_out_cnt = Column(INTEGER, default=0) - erc1155_transfer_out_cnt = Column(INTEGER, default=0) - - internal_txn_cnt = Column(INTEGER, Computed("internal_txn_in_cnt + internal_txn_out_cnt")) - erc20_transfer_cnt = Column(INTEGER, Computed("erc20_transfer_in_cnt + erc20_transfer_out_cnt")) - erc721_transfer_cnt = Column(INTEGER, Computed("erc721_transfer_in_cnt + erc721_transfer_out_cnt")) - erc1155_transfer_cnt = Column(INTEGER, 
Computed("erc1155_transfer_in_cnt + erc1155_transfer_out_cnt")) - - txn_self_cnt = Column(INTEGER, default=0, nullable=False) - txn_in_error_cnt = Column(INTEGER, default=0, nullable=False) - txn_out_error_cnt = Column(INTEGER, default=0, nullable=False) - txn_self_error_cnt = Column(INTEGER, default=0, nullable=False) - - txn_cnt = Column(INTEGER, Computed("((txn_in_cnt + txn_out_cnt) - txn_self_cnt)")) - - deposit_cnt = Column(INTEGER, default=0) - withdraw_cnt = Column(INTEGER, default=0) - gas_in_used = Column(NUMERIC(78), default=0) - l2_txn_in_fee = Column(NUMERIC(78), default=0) - l1_txn_in_fee = Column(NUMERIC(78), default=0) - txn_in_fee = Column(NUMERIC(78), default=0) - gas_out_used = Column(NUMERIC(78), default=0) - l2_txn_out_fee = Column(NUMERIC(78), default=0) - l1_txn_out_fee = Column(NUMERIC(78), default=0) - txn_out_fee = Column(NUMERIC(78), default=0) - contract_deployed_cnt = Column(INTEGER, default=0) - from_address_unique_interacted_cnt = Column(INTEGER, default=0) - to_address_unique_interacted_cnt = Column(INTEGER, default=0) diff --git a/hemera/common/models/scheduled_metadata.py b/hemera/common/models/scheduled_metadata.py deleted file mode 100644 index 719b5277f..000000000 --- a/hemera/common/models/scheduled_metadata.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy import Column, DateTime -from sqlalchemy.dialects.postgresql import INTEGER, VARCHAR - -from hemera.common.models import HemeraModel - - -class ScheduledMetadata(HemeraModel): - __tablename__ = "af_index_na_scheduled_metadata" - __table_args__ = {"extend_existing": True} - id = Column(INTEGER, primary_key=True) - dag_id = Column(VARCHAR) - execution_date = Column(DateTime) - last_data_timestamp = Column(DateTime) diff --git a/hemera/common/models/stats/__init__.py b/hemera/common/models/stats/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hemera/common/models/stats/daily_addresses_stats.py b/hemera/common/models/stats/daily_addresses_stats.py new 
file mode 100644 index 000000000..dc6d8a6cf --- /dev/null +++ b/hemera/common/models/stats/daily_addresses_stats.py @@ -0,0 +1,24 @@ +from datetime import datetime +from typing import Optional + +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class DailyAddressesStats(HemeraModel, table=True): + __tablename__ = "af_stats_na_daily_addresses" + + # Primary key fields + block_date: datetime = Field(primary_key=True) + + # Numerical fields + active_address_cnt: Optional[int] = Field(default=None) + receiver_address_cnt: Optional[int] = Field(default=None) + sender_address_cnt: Optional[int] = Field(default=None) + total_address_cnt: Optional[int] = Field(default=None) + new_address_cnt: Optional[int] = Field(default=None) + + # Metadata fields (Optional) + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) diff --git a/hemera/common/models/stats/daily_blocks_stats.py b/hemera/common/models/stats/daily_blocks_stats.py new file mode 100644 index 000000000..84ce26e50 --- /dev/null +++ b/hemera/common/models/stats/daily_blocks_stats.py @@ -0,0 +1,33 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class DailyBlocksStats(HemeraModel, table=True): + __tablename__ = "af_stats_na_daily_blocks" + + # Primary key fields + block_date: datetime = Field(primary_key=True) # Assuming block_date is the primary key + + # Numerical fields + cnt: Optional[int] = Field(default=None) + avg_size: Optional[Decimal] = Field(default=None) + avg_gas_limit: Optional[Decimal] = Field(default=None) + avg_gas_used: Optional[Decimal] = Field(default=None) + total_gas_used: Optional[int] = Field(default=None) + avg_gas_used_percentage: Optional[Decimal] = Field(default=None) + avg_txn_cnt: Optional[Decimal] = Field(default=None) + total_cnt: 
Optional[int] = Field(default=None) + block_interval: Optional[Decimal] = Field(default=None) + + # Timestamp fields + max_timestamp: Optional[datetime] = Field(default=None) + min_timestamp: Optional[datetime] = Field(default=None) + + # Metadata fields (Optional) + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) diff --git a/hemera/common/models/stats/daily_boards_stats.py b/hemera/common/models/stats/daily_boards_stats.py new file mode 100644 index 000000000..7abfa7e2d --- /dev/null +++ b/hemera/common/models/stats/daily_boards_stats.py @@ -0,0 +1,17 @@ +from datetime import date +from typing import Optional + +from sqlalchemy import Column +from sqlalchemy.dialects.postgresql import BIGINT, DATE +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class DailyBoardsStats(HemeraModel, table=True): + __tablename__ = "af_eco_boards" + + board_id: str = Field(default=None, primary_key=True) + block_date: date = Field(sa_column=Column(DATE, primary_key=True)) + key: str = Field(default=None, primary_key=True) + count: Optional[int] = Field(default=None, sa_column=Column(BIGINT)) diff --git a/hemera/common/models/stats/daily_tokens_stats.py b/hemera/common/models/stats/daily_tokens_stats.py new file mode 100644 index 000000000..0405b011a --- /dev/null +++ b/hemera/common/models/stats/daily_tokens_stats.py @@ -0,0 +1,21 @@ +from datetime import datetime +from typing import Optional + +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class DailyTokensStats(HemeraModel, table=True): + __tablename__ = "af_stats_na_daily_tokens" + + # Primary key + block_date: datetime = Field(primary_key=True) + + # Fields + erc20_active_address_cnt: Optional[int] = Field(default=None) + erc20_total_transfer_cnt: Optional[int] = Field(default=None) + erc721_active_address_cnt: Optional[int] = Field(default=None) + 
erc721_total_transfer_cnt: Optional[int] = Field(default=None) + erc1155_active_address_cnt: Optional[int] = Field(default=None) + erc1155_total_transfer_cnt: Optional[int] = Field(default=None) diff --git a/hemera/common/models/stats/daily_transactions_stats.py b/hemera/common/models/stats/daily_transactions_stats.py new file mode 100644 index 000000000..00e12707a --- /dev/null +++ b/hemera/common/models/stats/daily_transactions_stats.py @@ -0,0 +1,39 @@ +from datetime import date +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Column +from sqlalchemy.dialects.postgresql import BIGINT, DATE +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class DailyTransactionsStats(HemeraModel, table=True): + __tablename__ = "af_stats_na_daily_transactions" + + # Primary key field + block_date: date = Field(sa_column=Column(DATE, primary_key=True)) + + # Count fields + cnt: Optional[int] = Field(default=None, sa_column=Column(BIGINT)) + total_cnt: Optional[int] = Field(default=None, sa_column=Column(BIGINT)) + txn_error_cnt: Optional[int] = Field(default=None, sa_column=Column(BIGINT)) + + # Transaction fee statistics + avg_transaction_fee: Optional[Decimal] = Field(default=None) + avg_gas_price: Optional[Decimal] = Field(default=None) + max_gas_price: Optional[Decimal] = Field(default=None) + min_gas_price: Optional[Decimal] = Field(default=None) + + # L1 fee statistics + avg_receipt_l1_fee: Optional[Decimal] = Field(default=None) + max_receipt_l1_fee: Optional[Decimal] = Field(default=None) + min_receipt_l1_fee: Optional[Decimal] = Field(default=None) + + # L1 gas price statistics + avg_receipt_l1_gas_price: Optional[Decimal] = Field(default=None) + max_receipt_l1_gas_price: Optional[Decimal] = Field(default=None) + min_receipt_l1_gas_price: Optional[Decimal] = Field(default=None) + + __query_order__ = [block_date] diff --git a/hemera/common/models/sync_record.py b/hemera/common/models/sync_record.py deleted 
file mode 100644 index 24b0bbcca..000000000 --- a/hemera/common/models/sync_record.py +++ /dev/null @@ -1,11 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel - - -class SyncRecord(HemeraModel): - __tablename__ = "sync_record" - mission_sign = Column(VARCHAR, primary_key=True) - last_block_number = Column(BIGINT) - update_time = Column(TIMESTAMP) diff --git a/hemera/common/models/token/__init__.py b/hemera/common/models/token/__init__.py new file mode 100644 index 000000000..34c0f7ad7 --- /dev/null +++ b/hemera/common/models/token/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/3/4 11:24 +# @Author ideal93 +# @File __init__.py +# @Brief diff --git a/hemera/common/models/token/nft.py b/hemera/common/models/token/nft.py new file mode 100644 index 000000000..07b0a15d7 --- /dev/null +++ b/hemera/common/models/token/nft.py @@ -0,0 +1,103 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional, Type +from urllib import parse + +from sqlalchemy import Column, text +from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, JSONB, NUMERIC, TIMESTAMP, VARCHAR +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.token_id_infos import ( + ERC721TokenIdChange, + ERC721TokenIdDetail, + ERC1155TokenIdDetail, + UpdateERC721TokenIdDetail, + UpdateERC1155TokenIdDetail, +) + + +def token_uri_format_converter(table: Type[HemeraModel], data, is_update=False): + if data.token_uri is not None: + data.token_uri = parse.quote_plus(data.token_uri) + return general_converter(table, data, is_update) + + +class NFTDetails(HemeraModel, table=True): + __tablename__ = "nft_details" + + token_address: bytes = Field(sa_column=Column(BYTEA, primary_key=True)) + token_id: Decimal = Field(sa_column=Column(NUMERIC(100), primary_key=True)) + 
token_supply: Optional[Decimal] = Field(sa_column=Column(NUMERIC(78))) + token_owner: Optional[bytes] = Field(sa_column=Column(BYTEA)) + token_uri: Optional[str] = Field(sa_column=Column(VARCHAR)) + token_uri_info: Optional[dict] = Field(sa_column=Column(JSONB)) + + block_number: Optional[int] = Field(sa_column=Column(BIGINT)) + block_timestamp: Optional[datetime] = Field(sa_column=Column(TIMESTAMP)) + + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + reorg: bool = Field(default=False) + + __table_args__ = ( + Index("nft_details_token_address_index", text("token_address DESC, token_id")), + Index("nft_details_address_index", text("token_owner DESC, token_id ASC")), + ) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": ERC1155TokenIdDetail, + "conflict_do_update": False, + "update_strategy": None, + "converter": token_uri_format_converter, + }, + { + "domain": UpdateERC1155TokenIdDetail, + "conflict_do_update": True, + "update_strategy": "EXCLUDED.block_number >= nft_details.block_number", + "converter": general_converter, + }, + { + "domain": ERC721TokenIdDetail, + "conflict_do_update": False, + "update_strategy": None, + "converter": token_uri_format_converter, + }, + { + "domain": UpdateERC721TokenIdDetail, + "conflict_do_update": True, + "update_strategy": "EXCLUDED.block_number >= nft_details.block_number", + "converter": general_converter, + }, + ] + + +class NFTIdChanges(HemeraModel, table=True): + __tablename__ = "nft_id_changes" + + token_address: bytes = Field(sa_column=Column(BYTEA, primary_key=True)) + token_id: Decimal = Field(sa_column=Column(NUMERIC(100), primary_key=True)) + token_owner: Optional[bytes] = Field(sa_column=Column(BYTEA)) + + block_number: int = Field(sa_column=Column(BIGINT, primary_key=True)) + block_timestamp: Optional[datetime] = Field(sa_column=Column(TIMESTAMP)) + + create_time: datetime = 
Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + reorg: bool = Field(default=False) + + __table_args__ = (Index("nft_id_number_desc_index", text("token_address, token_id, block_number DESC")),) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": ERC721TokenIdChange, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] diff --git a/hemera/common/models/token/token_balances.py b/hemera/common/models/token/token_balances.py new file mode 100644 index 000000000..45482daf1 --- /dev/null +++ b/hemera/common/models/token/token_balances.py @@ -0,0 +1,87 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional, Type + +from sqlalchemy import Column, func +from sqlalchemy.dialects.postgresql import BOOLEAN, TIMESTAMP +from sqlalchemy.sql import text +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.current_token_balance import CurrentTokenBalance +from hemera.indexer.domains.token_balance import TokenBalance + + +class AddressTokenBalances(HemeraModel, table=True): + __tablename__ = "address_token_balances" + + # Primary keys + address: bytes = Field(primary_key=True) + token_address: bytes = Field(primary_key=True) + block_number: int = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + # Token info + balance: Optional[Decimal] = Field(default=None, max_digits=100) + + # Metadata + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + reorg: bool = Field(default=False, sa_column=Column(BOOLEAN, server_default=text("false"))) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": 
TokenBalance, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "token_balance_address_id_number_index", + text("address, token_address, block_number DESC"), + ), + ) + + +class CurrentTokenBalances(HemeraModel, table=True): + __tablename__ = "address_current_token_balances" + + # Primary key fields + address: bytes = Field(primary_key=True) + token_address: bytes = Field(primary_key=True) + + # Token fields + balance: Optional[Decimal] = Field(default=None, max_digits=100) + + # Block related fields + block_number: Optional[int] = Field(default=None) + block_timestamp: Optional[datetime] = Field(default=None) + + # Metadata fields + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + reorg: Optional[bool] = Field(default=False) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": CurrentTokenBalance, + "conflict_do_update": True, + "update_strategy": "EXCLUDED.block_number >= address_current_token_balances.block_number", + "converter": general_converter, + } + ] + + __table_args__ = ( + Index("current_token_balances_token_address_balance_of_index", text("token_address, balance DESC")), + ) diff --git a/hemera/common/models/token/token_id_balances.py b/hemera/common/models/token/token_id_balances.py new file mode 100644 index 000000000..688581587 --- /dev/null +++ b/hemera/common/models/token/token_id_balances.py @@ -0,0 +1,93 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import Column, func +from sqlalchemy.dialects.postgresql import BOOLEAN, TIMESTAMP +from sqlalchemy.sql import text +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.current_token_id_balance import CurrentTokenIdBalance +from 
hemera.indexer.domains.token_id_balance import TokenIdBalance + + +class AddressTokenIdBalances(HemeraModel, table=True): + __tablename__ = "address_token_id_balances" + + # Primary keys + address: bytes = Field(primary_key=True) + token_id: Decimal = Field(primary_key=True, max_digits=78) + token_address: bytes = Field(primary_key=True) + block_number: int = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + # Token info + balance: Optional[Decimal] = Field(default=None, max_digits=100) + + # Metadata + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + reorg: bool = Field(default=False, sa_column=Column(BOOLEAN, server_default=text("false"))) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": TokenIdBalance, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index( + "token_balance_id_address_id_number_index", + text("address, token_address, token_id, block_number DESC"), + ), + ) + + +class CurrentTokenIdBalances(HemeraModel, table=True): + __tablename__ = "address_current_token_id_balances" + + # Primary key fields + address: bytes = Field(primary_key=True) + token_id: Decimal = Field(primary_key=True, max_digits=78) + token_address: bytes = Field(primary_key=True) + + # Token fields + balance: Optional[Decimal] = Field(default=None, max_digits=100) + + # Block related fields + block_number: Optional[int] = Field(default=None) + block_timestamp: Optional[datetime] = Field(default=None) + + # Metadata fields + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + reorg: Optional[bool] = Field(default=False) + + @staticmethod + def 
model_domain_mapping(): + return [ + { + "domain": CurrentTokenIdBalance, + "conflict_do_update": True, + "update_strategy": "EXCLUDED.block_number >= address_current_token_id_balances.block_number", + "converter": general_converter, + } + ] + + __table_args__ = ( + Index("current_token_id_balances_token_address_balance_of_index", text("token_address, balance DESC")), + Index( + "current_token_id_balances_token_address_id_balance_of_index", + text("token_address, token_id, balance DESC"), + ), + ) diff --git a/hemera/common/models/token/token_transfers.py b/hemera/common/models/token/token_transfers.py new file mode 100644 index 000000000..c29592567 --- /dev/null +++ b/hemera/common/models/token/token_transfers.py @@ -0,0 +1,210 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy import PrimaryKeyConstraint, text +from sqlmodel import Field, Index, SQLModel + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer + + +class BaseTokenTransfer(SQLModel): + """Base model for all token transfers""" + + transaction_hash: bytes = Field(primary_key=True) + log_index: int = Field(primary_key=True) + block_number: Optional[int] = Field(primary_key=True) + block_hash: bytes = Field(primary_key=True) + block_timestamp: Optional[datetime] = Field(primary_key=True) + + from_address: Optional[bytes] = Field(default=None) + to_address: Optional[bytes] = Field(default=None) + token_address: Optional[bytes] = Field(default=None) + + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + reorg: Optional[bool] = Field(default=False) + + class Config: + arbitrary_types_allowed = True + + +class ERC20TokenTransfers(BaseTokenTransfer, HemeraModel, table=True): + """Model for ERC20 token transfers""" + + __tablename__ = 
"erc20_token_transfers" + + value: Optional[Decimal] = Field(default=None) + + __table_args__ = ( + PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index"), + Index( + "erc20_token_transfers_number_log_index", text("block_timestamp DESC, block_number DESC, log_index DESC") + ), + Index( + "erc20_token_transfers_token_address_number_log_index_index", + text("token_address,block_timestamp DESC, block_number DESC, log_index DESC"), + ), + Index( + "erc20_token_transfers_token_address_from_index_index", + text("token_address, from_address, block_timestamp DESC"), + ), + Index( + "erc20_token_transfers_token_address_to_index_index", + text("token_address, to_address, block_timestamp DESC"), + ), + ) + + __query_order__ = ["block_number", "log_index"] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": ERC20TokenTransfer, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + +class ERC721TokenTransfers(BaseTokenTransfer, HemeraModel, table=True): + """Model for ERC721 token transfers""" + + __tablename__ = "erc721_token_transfers" + + token_id: Optional[Decimal] = Field(default=None) + + __table_args__ = ( + PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index"), + Index("erc721_token_transfers_block_timestamp_index", text("block_timestamp DESC")), + Index("erc721_token_transfers_number_log_index", text("block_number DESC, log_index DESC")), + Index( + "erc721_token_transfers_token_address_number_log_index_index", + text("token_address, block_number DESC, log_index DESC"), + ), + Index("erc721_token_transfers_token_address_id_index", text("token_address, token_id")), + Index("erc721_token_transfers_token_address_from_index", text("token_address, from_address")), + Index("erc721_token_transfers_token_address_to_index", text("token_address, to_address")), + ) + + __query_order__ = ["block_number", "log_index"] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": 
ERC721TokenTransfer, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + +class ERC1155TokenTransfers(BaseTokenTransfer, HemeraModel, table=True): + """Model for ERC1155 token transfers""" + + __tablename__ = "erc1155_token_transfers" + + token_id: Decimal = Field(primary_key=True) + value: Optional[Decimal] = Field(default=None) + + __table_args__ = ( + PrimaryKeyConstraint("transaction_hash", "block_hash", "log_index", "token_id"), + Index( + "erc1155_token_transfers_number_log_index", text("block_timestamp DESC, block_number DESC, log_index DESC") + ), + Index( + "erc1155_token_transfers_token_address_number_log_index_index", + text("token_address, block_timestamp DESC, block_number DESC, log_index DESC"), + ), + Index("erc1155_token_transfers_token_address_id_index", text("token_address, token_id, block_timestamp DESC")), + Index( + "erc1155_token_transfers_token_address_from_index", + text("token_address, from_address, block_timestamp DESC"), + ), + Index( + "erc1155_token_transfers_token_address_to_index", text("token_address, to_address, block_timestamp DESC") + ), + ) + + __query_order__ = ["block_number", "log_index"] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": ERC1155TokenTransfer, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + +class NftTransfers(HemeraModel, table=True): + """ + Model for tracking nft(ERC721/ERC1155) transfer events. 
+ """ + + __tablename__ = "nft_transfers" + + # Primary keys + transaction_hash: bytes = Field(nullable=False, primary_key=True) + block_hash: bytes = Field(nullable=False, primary_key=True) + log_index: int = Field(nullable=False, primary_key=True) + token_id: Decimal = Field(nullable=False, primary_key=True, max_digits=100) + block_timestamp: datetime = Field(nullable=False, primary_key=True) + block_number: int = Field(nullable=False, primary_key=True) + + # Transfer info + from_address: Optional[bytes] = Field(default=None) + to_address: Optional[bytes] = Field(default=None) + token_address: Optional[bytes] = Field(default=None) + value: Optional[Decimal] = Field(default=None, max_digits=100) + + # Metadata + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) + reorg: bool = Field(default=False) + + # Query order specification + __query_order__ = [block_timestamp, block_number, log_index] + + @staticmethod + def model_domain_mapping(): + """ + Define the domain model mapping configuration. 
+ """ + return [ + { + "domain": ERC1155TokenTransfer, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + }, + { + "domain": ERC721TokenTransfer, + "conflict_do_update": True, + "update_strategy": None, + "converter": general_converter, + }, + ] + + __table_args__ = ( + # Block-based indices + Index("idx_nft_transfers_block_log", text("block_timestamp DESC, block_number DESC, log_index DESC")), + # Address-based indices with time + Index( + "idx_nft_transfers_token_time", + text("token_address, block_timestamp DESC, block_number DESC, log_index DESC"), + ), + # Token-specific indices + Index( + "idx_nft_transfers_token_id", + text("token_address, token_id, block_timestamp DESC, block_number DESC, log_index DESC"), + ), + ) diff --git a/hemera/common/models/token/tokens.py b/hemera/common/models/token/tokens.py new file mode 100644 index 000000000..0ef7fbae6 --- /dev/null +++ b/hemera/common/models/token/tokens.py @@ -0,0 +1,99 @@ +from datetime import datetime +from decimal import Decimal +from typing import Dict, Optional + +from sqlalchemy import Column, desc, func, text +from sqlalchemy.dialects.postgresql import JSONB, TIMESTAMP +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.token import MarkBalanceToken, MarkTotalSupplyToken, Token, UpdateToken + + +class Tokens(HemeraModel, table=True): + __tablename__ = "tokens" + + # Primary key + address: bytes = Field(primary_key=True) + + # Token basic info + token_type: Optional[str] = Field(default=None) + name: Optional[str] = Field(default=None) + symbol: Optional[str] = Field(default=None) + decimals: Optional[Decimal] = Field(default=None, max_digits=100) + total_supply: Optional[Decimal] = Field(default=None, max_digits=100) + block_number: Optional[int] = Field(default=None) + + # Token statistics + holder_count: Optional[int] = Field(default=0) + transfer_count: Optional[int] = 
Field(default=0) + icon_url: Optional[str] = Field(default=None) + urls: Optional[Dict] = Field(default=None, sa_column=Column(JSONB)) + volume_24h: Optional[Decimal] = Field(default=None, max_digits=38, decimal_places=2) + price: Optional[Decimal] = Field(default=None, max_digits=38, decimal_places=6) + previous_price: Optional[Decimal] = Field(default=None, max_digits=38, decimal_places=6) + market_cap: Optional[Decimal] = Field(default=None, max_digits=38, decimal_places=2) + on_chain_market_cap: Optional[Decimal] = Field(default=None, max_digits=38, decimal_places=2) + is_verified: bool = Field(default=False) + + # External IDs + cmc_id: Optional[int] = Field(default=None) + cmc_slug: Optional[str] = Field(default=None) + gecko_id: Optional[str] = Field(default=None) + description: Optional[str] = Field(default=None) + no_balance_of: bool = Field(default=False) + fail_balance_of_count: Optional[int] = Field(default=0) + succeed_balance_of_count: Optional[int] = Field(default=0) + no_total_supply: bool = Field(default=False) + fail_total_supply_count: Optional[int] = Field(default=0) + + # Metadata + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": Token, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + }, + { + "domain": UpdateToken, + "conflict_do_update": True, + "update_strategy": "EXCLUDED.block_number >= tokens.block_number", + "converter": general_converter, + }, + { + "domain": MarkTotalSupplyToken, + "conflict_do_update": True, + "update_strategy": None, + # "update_strategy": "EXCLUDED.block_number >= tokens.block_number", + "converter": general_converter, + }, + { + "domain": MarkBalanceToken, + "conflict_do_update": True, + 
"update_strategy": None, + # "update_strategy": "EXCLUDED.block_number >= tokens.block_number", + "converter": general_converter, + }, + ] + + __table_args__ = ( + Index("tokens_name_index", "name"), + Index("tokens_symbol_index", "symbol"), + Index("tokens_type_index", "token_type"), + Index("tokens_type_holders_index", "token_type", desc("holder_count")), + Index("tokens_type_on_chain_market_cap_index", "token_type", desc("on_chain_market_cap")), + # Note: tsvector index needs to be created manually + # CREATE INDEX tokens_tsvector_symbol_name_index + # ON tokens + # USING gin (to_tsvector('english', (symbol || ' ' || name))); + ) diff --git a/hemera/common/models/token_balances.py b/hemera/common/models/token_balances.py deleted file mode 100644 index 4aecc294f..000000000 --- a/hemera/common/models/token_balances.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import Type - -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func, text -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, NUMERIC, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.token_balance import TokenBalance - - -def token_balances_general_converter(table: Type[HemeraModel], data: TokenBalance, is_update=False): - - if data.token_id is None: - data.token_id = -1 - - return general_converter(table, data, is_update) - - -class AddressTokenBalances(HemeraModel): - __tablename__ = "address_token_balances" - - address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(78), primary_key=True) - token_type = Column(VARCHAR) - token_address = Column(BYTEA, primary_key=True) - balance = Column(NUMERIC(100)) - - block_number = Column(BIGINT, primary_key=True) - block_timestamp = Column(TIMESTAMP, primary_key=True) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - 
__table_args__ = (PrimaryKeyConstraint("address", "token_address", "token_id", "block_number", "block_timestamp"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": TokenBalance, - "conflict_do_update": True, - "update_strategy": None, - "converter": token_balances_general_converter, - } - ] - - -Index( - "token_balance_address_id_number_index", - AddressTokenBalances.address, - AddressTokenBalances.token_address, - desc(AddressTokenBalances.token_id), - desc(AddressTokenBalances.block_number), -) diff --git a/hemera/common/models/token_hourly_price.py b/hemera/common/models/token_hourly_price.py deleted file mode 100644 index 115bd02e9..000000000 --- a/hemera/common/models/token_hourly_price.py +++ /dev/null @@ -1,14 +0,0 @@ -from sqlalchemy import Column, DateTime, Numeric, String - -from hemera.common.models import HemeraModel - - -class TokenHourlyPrices(HemeraModel): - symbol = Column(String, primary_key=True) - timestamp = Column(DateTime, primary_key=True) - price = Column(Numeric) - - -class CoinPrices(HemeraModel): - block_date = Column(DateTime, primary_key=True) - price = Column(Numeric) diff --git a/hemera/common/models/token_prices.py b/hemera/common/models/token_prices.py deleted file mode 100644 index d4c7acfbf..000000000 --- a/hemera/common/models/token_prices.py +++ /dev/null @@ -1,9 +0,0 @@ -from sqlalchemy import Column, DateTime, Numeric, String - -from hemera.common.models import HemeraModel - - -class TokenPrices(HemeraModel): - symbol = Column(String, primary_key=True) - timestamp = Column(DateTime, primary_key=True) - price = Column(Numeric) diff --git a/hemera/common/models/tokens.py b/hemera/common/models/tokens.py deleted file mode 100644 index 3ad019d3e..000000000 --- a/hemera/common/models/tokens.py +++ /dev/null @@ -1,97 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, desc, func -from sqlalchemy.dialects.postgresql import BIGINT, BOOLEAN, BYTEA, INTEGER, JSONB, NUMERIC, TIMESTAMP, VARCHAR 
- -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.token import MarkBalanceToken, MarkTotalSupplyToken, Token, UpdateToken - - -class Tokens(HemeraModel): - __tablename__ = "tokens" - - address = Column(BYTEA, primary_key=True) - token_type = Column(VARCHAR) - name = Column(VARCHAR) - symbol = Column(VARCHAR) - decimals = Column(NUMERIC(100)) - total_supply = Column(NUMERIC(100)) - block_number = Column(BIGINT) - - holder_count = Column(INTEGER, default=0) - transfer_count = Column(INTEGER, default=0) - icon_url = Column(VARCHAR) - urls = Column(JSONB) - volume_24h = Column(NUMERIC(38, 2)) - price = Column(NUMERIC(38, 6)) - previous_price = Column(NUMERIC(38, 6)) - market_cap = Column(NUMERIC(38, 2)) - on_chain_market_cap = Column(NUMERIC(38, 2)) - is_verified = Column(BOOLEAN, default=False) - - cmc_id = Column(INTEGER) - cmc_slug = Column(VARCHAR) - gecko_id = Column(VARCHAR) - description = Column(VARCHAR) - - no_balance_of = Column(BOOLEAN, default=False) - fail_balance_of_count = Column(INTEGER, default=0) - succeed_balance_of_count = Column(INTEGER, default=0) - no_total_supply = Column(BOOLEAN, default=False) - fail_total_supply_count = Column(BOOLEAN, default=0) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - __table_args__ = (PrimaryKeyConstraint("address"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": Token, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - }, - { - "domain": UpdateToken, - "conflict_do_update": True, - "update_strategy": "EXCLUDED.block_number > tokens.block_number", - "converter": general_converter, - }, - { - "domain": MarkTotalSupplyToken, - "conflict_do_update": True, - "update_strategy": None, - # "update_strategy": "EXCLUDED.block_number >= tokens.block_number", - "converter": general_converter, - }, - { - "domain": MarkBalanceToken, - 
"conflict_do_update": True, - "update_strategy": None, - # "update_strategy": "EXCLUDED.block_number >= tokens.block_number", - "converter": general_converter, - }, - ] - - -Index("tokens_name_index", Tokens.name) -Index("tokens_symbol_index", Tokens.symbol) -Index("tokens_type_index", Tokens.token_type) -Index("tokens_type_holders_index", Tokens.token_type, desc(Tokens.holder_count)) -Index( - "tokens_type_on_chain_market_cap_index", - Tokens.token_type, - desc(Tokens.on_chain_market_cap), -) - -# because of sqlalchemy doesn't recognize 'english' with datatype REGCONFIG -# alembic could not track this index -# before sqlalchemy support this case, we suggest running this sql manually - -# Index('tokens_tsvector_symbol_name_index', -# func.to_tsvector('english', (Tokens.symbol + ' ' + Tokens.name)), postgresql_using='gin') - -# CREATE INDEX tokens_tsvector_symbol_name_index -# ON tokens -# USING gin (to_tsvector('english', (symbol || ' ' || name))); diff --git a/hemera/common/models/trace/__init__.py b/hemera/common/models/trace/__init__.py new file mode 100644 index 000000000..48e66b1ec --- /dev/null +++ b/hemera/common/models/trace/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/3/4 11:52 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/common/models/trace/address_coin_balances.py b/hemera/common/models/trace/address_coin_balances.py new file mode 100644 index 000000000..64f7f8523 --- /dev/null +++ b/hemera/common/models/trace/address_coin_balances.py @@ -0,0 +1,38 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.coin_balance import CoinBalance + + +class AddressCoinBalances(HemeraModel, table=True): + __tablename__ = "address_coin_balances" + + # Primary key fields + address: bytes = Field(primary_key=True) + block_number: int = 
Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + # Balance + balance: Optional[Decimal] = Field(default=None, max_digits=100) + + # Metadata fields + create_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + reorg: Optional[bool] = Field(default=False) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": CoinBalance, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = () diff --git a/hemera/common/models/trace/contracts.py b/hemera/common/models/trace/contracts.py new file mode 100644 index 000000000..e138c6572 --- /dev/null +++ b/hemera/common/models/trace/contracts.py @@ -0,0 +1,70 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from sqlalchemy import Column, String, text +from sqlalchemy.dialects.postgresql import BOOLEAN, JSONB, TIMESTAMP +from sqlalchemy.sql.expression import func +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.contract import Contract, ContractFromTransaction + + +class Contracts(HemeraModel, table=True): + __tablename__ = "contracts" + + # Primary key + address: bytes = Field(primary_key=True) + + # Basic contract information + name: Optional[str] = Field(default=None) + contract_creator: Optional[bytes] = Field(default=None) + creation_code: Optional[bytes] = Field(default=None) + deployed_code: Optional[bytes] = Field(default=None) + + # Block and transaction details + block_number: Optional[int] = Field(default=None) + block_hash: Optional[bytes] = Field(default=None) + block_timestamp: Optional[datetime] = Field(default=None) + transaction_index: Optional[int] = Field(default=None) + transaction_hash: Optional[bytes] = Field(default=None) + transaction_from_address: Optional[bytes] = Field(default=None) + + # Additional 
contract metadata + official_website: Optional[str] = Field(default=None) + description: Optional[str] = Field(default=None) + email: Optional[str] = Field(default=None) + social_list: Optional[Dict[str, Any]] = Field(default=None, sa_column=Column(JSONB)) + is_verified: bool = Field(default=False) + is_proxy: Optional[bool] = Field(default=None) + implementation_contract: Optional[bytes] = Field(default=None) + verified_implementation_contract: Optional[bytes] = Field(default=None) + proxy_standard: Optional[str] = Field(default=None) + + # Metadata fields + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + reorg: bool = Field(default=False, sa_column=Column(BOOLEAN, server_default=text("false"))) + deployed_code_hash: Optional[str] = Field(default=None) + bytecode: str = Field(default=None) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": Contract, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + }, + { + "domain": ContractFromTransaction, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + }, + ] diff --git a/hemera/common/models/trace/traces.py b/hemera/common/models/trace/traces.py new file mode 100644 index 000000000..f9ec18233 --- /dev/null +++ b/hemera/common/models/trace/traces.py @@ -0,0 +1,183 @@ +from datetime import datetime +from decimal import Decimal +from typing import Dict, List, Optional + +from sqlalchemy import Column, desc, func, text +from sqlalchemy.dialects.postgresql import ARRAY, BOOLEAN, INTEGER, JSONB, TIMESTAMP +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel, general_converter +from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction +from 
hemera.indexer.domains.trace import Trace +from hemera.indexer.domains.transaction_trace_json import TransactionTraceJson + + +class Traces(HemeraModel, table=True): + __tablename__ = "traces" + + # Primary key + trace_id: str = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + # Address fields + from_address: Optional[bytes] = Field(default=None) + to_address: Optional[bytes] = Field(default=None) + + # Value and data fields + value: Optional[Decimal] = Field(default=None, max_digits=100) + input: Optional[bytes] = Field(default=None) + output: Optional[bytes] = Field(default=None) + + # Type fields + trace_type: Optional[str] = Field(default=None) + call_type: Optional[str] = Field(default=None) + + # Gas fields + gas: Optional[Decimal] = Field(default=None, max_digits=100) + gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + + # Trace specific fields + subtraces: Optional[int] = Field(default=None) + trace_address: Optional[List[int]] = Field(default=None, sa_column=Column(ARRAY(INTEGER))) + error: Optional[str] = Field(default=None) + status: Optional[int] = Field(default=None) + + # Block fields + block_number: Optional[int] = Field(default=None) + block_hash: Optional[bytes] = Field(default=None) + transaction_index: Optional[int] = Field(default=None) + transaction_hash: Optional[bytes] = Field(default=None) + + # Metadata fields + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + reorg: bool = Field(default=False, sa_column=Column(BOOLEAN, server_default=text("false"))) + + __query_order__ = [block_number, transaction_index] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": Trace, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + 
__table_args__ = ( + Index("traces_transaction_hash_index", "transaction_hash"), + Index("traces_block_number_index", desc("block_number")), + Index( + "traces_from_address_block_number_index", + "from_address", + desc("block_number"), + ), + Index( + "traces_to_address_block_number_index", + "to_address", + desc("block_number"), + ), + ) + + +class ContractInternalTransactions(HemeraModel, table=True): + __tablename__ = "contract_internal_transactions" + + # Primary key + trace_id: str = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + # Address fields + from_address: Optional[bytes] = Field(default=None) + to_address: Optional[bytes] = Field(default=None) + + # Value and data fields + value: Optional[Decimal] = Field(default=None, max_digits=100) + input: Optional[bytes] = Field(default=None) + output: Optional[bytes] = Field(default=None) + + # Type fields + trace_type: Optional[str] = Field(default=None) + call_type: Optional[str] = Field(default=None) + + # Gas fields + gas: Optional[Decimal] = Field(default=None, max_digits=100) + gas_used: Optional[Decimal] = Field(default=None, max_digits=100) + + # Trace specific fields + subtraces: Optional[int] = Field(default=None) + trace_address: Optional[List[int]] = Field(default=None, sa_column=Column(ARRAY(INTEGER))) + error: Optional[str] = Field(default=None) + status: Optional[int] = Field(default=None) + + # Block fields + block_number: Optional[int] = Field(default=None) + block_hash: Optional[bytes] = Field(default=None) + transaction_index: Optional[int] = Field(default=None) + transaction_hash: Optional[bytes] = Field(default=None) + + # Metadata fields + create_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + update_time: datetime = Field( + default_factory=datetime.utcnow, sa_column=Column(TIMESTAMP, server_default=func.now()) + ) + reorg: bool = Field(default=False, sa_column=Column(BOOLEAN, 
server_default=text("false"))) + + __query_order__ = [block_number, transaction_index] + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": ContractInternalTransaction, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] + + __table_args__ = ( + Index("contract_internal_transactions_transaction_hash_index", "transaction_hash"), + Index("contract_internal_transactions_block_number_index", desc("block_number")), + Index( + "contract_internal_transactions_from_address_block_number_index", + "from_address", + desc("block_number"), + ), + Index( + "contract_internal_transactions_to_address_block_number_index", + "to_address", + desc("block_number"), + ), + ) + + +class TransactionTraceJsons(HemeraModel, table=True): + __tablename__ = "transaction_trace_json" + + # Primary key + transaction_hash: bytes = Field(primary_key=True) + block_timestamp: datetime = Field(primary_key=True) + + block_number: int = Field(default=None) + block_hash: bytes = Field(default=None) + data: Optional[Dict] = Field(default=None, sa_column=Column(JSONB)) + + @staticmethod + def model_domain_mapping(): + return [ + { + "domain": TransactionTraceJson, + "conflict_do_update": False, + "update_strategy": None, + "converter": general_converter, + } + ] diff --git a/hemera/common/models/traces.py b/hemera/common/models/traces.py deleted file mode 100644 index 90c268912..000000000 --- a/hemera/common/models/traces.py +++ /dev/null @@ -1,58 +0,0 @@ -from sqlalchemy import Column, Index, desc, func, text -from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter -from hemera.indexer.domains.trace import Trace - - -class Traces(HemeraModel): - __tablename__ = "traces" - - trace_id = Column(VARCHAR, primary_key=True) - from_address = Column(BYTEA) - to_address = Column(BYTEA) - value = Column(NUMERIC(100)) - 
input = Column(BYTEA) - output = Column(BYTEA) - trace_type = Column(VARCHAR) - call_type = Column(VARCHAR) - gas = Column(NUMERIC(100)) - gas_used = Column(NUMERIC(100)) - subtraces = Column(INTEGER) - trace_address = Column(ARRAY(INTEGER)) - error = Column(TEXT) - status = Column(INTEGER) - block_number = Column(BIGINT) - block_hash = Column(BYTEA) - block_timestamp = Column(TIMESTAMP) - transaction_index = Column(INTEGER) - transaction_hash = Column(BYTEA) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __query_order__ = [block_number, transaction_index] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": Trace, - "conflict_do_update": False, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index("traces_transaction_hash_index", Traces.transaction_hash) -Index("traces_block_number_index", desc(Traces.block_number)) - -Index( - "traces_from_address_block_number_index", - Traces.from_address, - desc(Traces.block_number), -) - -Index("traces_to_address_block_number_index", Traces.to_address, desc(Traces.block_number)) diff --git a/hemera/common/models/transactions.py b/hemera/common/models/transactions.py deleted file mode 100644 index b78a788cd..000000000 --- a/hemera/common/models/transactions.py +++ /dev/null @@ -1,112 +0,0 @@ -from typing import Type - -from sqlalchemy import Column, Computed, Index, asc, desc, func, text -from sqlalchemy.dialects.postgresql import ARRAY, BIGINT, BOOLEAN, BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP, VARCHAR - -from hemera.common.models import HemeraModel, general_converter -from hemera.common.utils.format_utils import hex_str_to_bytes -from hemera.indexer.domains.transaction import Transaction - - -class Transactions(HemeraModel): - __tablename__ = "transactions" - - hash = Column(BYTEA, primary_key=True) - transaction_index = Column(INTEGER) - 
from_address = Column(BYTEA) - to_address = Column(BYTEA) - value = Column(NUMERIC(100)) - transaction_type = Column(INTEGER) - input = Column(BYTEA) - method_id = Column(VARCHAR, Computed("substring((input)::varchar for 8)::bigint::varchar")) - nonce = Column(INTEGER) - - block_hash = Column(BYTEA) - block_number = Column(BIGINT) - block_timestamp = Column(TIMESTAMP) - - gas = Column(NUMERIC(100)) - gas_price = Column(NUMERIC(100)) - max_fee_per_gas = Column(NUMERIC(100)) - max_priority_fee_per_gas = Column(NUMERIC(100)) - - receipt_root = Column(BYTEA) - receipt_status = Column(INTEGER) - receipt_gas_used = Column(NUMERIC(100)) - receipt_cumulative_gas_used = Column(NUMERIC(100)) - receipt_effective_gas_price = Column(NUMERIC(100)) - receipt_l1_fee = Column(NUMERIC(100)) - receipt_l1_fee_scalar = Column(NUMERIC(100, 18)) - receipt_l1_gas_used = Column(NUMERIC(100)) - receipt_l1_gas_price = Column(NUMERIC(100)) - receipt_blob_gas_used = Column(NUMERIC(100)) - receipt_blob_gas_price = Column(NUMERIC(100)) - - blob_versioned_hashes = Column(ARRAY(BYTEA)) - receipt_contract_address = Column(BYTEA) - - exist_error = Column(BOOLEAN) - error = Column(TEXT) - revert_reason = Column(TEXT) - - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - reorg = Column(BOOLEAN, server_default=text("false")) - - __query_order__ = [block_number, transaction_index] - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": Transaction, - "conflict_do_update": False, - "update_strategy": None, - "converter": converter, - } - ] - - -Index("transactions_block_timestamp_index", Transactions.block_timestamp) - -Index( - "transactions_block_number_transaction_index", - desc(Transactions.block_number), - desc(Transactions.transaction_index), -) - -Index( - "transactions_from_address_block_number_transaction_idx", - asc(Transactions.from_address), - desc(Transactions.block_number), - 
desc(Transactions.transaction_index), -) - -Index( - "transactions_to_address_block_number_transaction_idx", - asc(Transactions.to_address), - desc(Transactions.block_number), - desc(Transactions.transaction_index), -) - - -def converter(table: Type[HemeraModel], data: Transaction, is_update=False): - converted_data = general_converter(table, data, is_update) - receipt = data.receipt - - converted_data["receipt_root"] = hex_str_to_bytes(receipt.root) if receipt and receipt.root else None - converted_data["receipt_status"] = receipt.status if receipt else None - converted_data["receipt_gas_used"] = receipt.gas_used if receipt else None - converted_data["receipt_cumulative_gas_used"] = receipt.cumulative_gas_used if receipt else None - converted_data["receipt_effective_gas_price"] = receipt.effective_gas_price if receipt else None - converted_data["receipt_l1_fee"] = receipt.l1_fee if receipt else None - converted_data["receipt_l1_fee_scalar"] = receipt.l1_fee_scalar if receipt else None - converted_data["receipt_l1_gas_used"] = receipt.l1_gas_used if receipt else None - converted_data["receipt_l1_gas_price"] = receipt.l1_gas_price if receipt else None - converted_data["receipt_blob_gas_used"] = receipt.blob_gas_used if receipt else None - converted_data["receipt_blob_gas_price"] = receipt.blob_gas_price if receipt else None - converted_data["receipt_contract_address"] = ( - hex_str_to_bytes(receipt.contract_address) if receipt and receipt.contract_address else None - ) - - return converted_data diff --git a/hemera/common/models/utils/__init__.py b/hemera/common/models/utils/__init__.py new file mode 100644 index 000000000..4971dccd2 --- /dev/null +++ b/hemera/common/models/utils/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/3/4 11:53 +# @Author ideal93 +# @File __init__.py.py +# @Brief diff --git a/hemera/common/models/utils/metrics_persistence.py b/hemera/common/models/utils/metrics_persistence.py new file mode 100644 
index 000000000..1d2fa2bf6 --- /dev/null +++ b/hemera/common/models/utils/metrics_persistence.py @@ -0,0 +1,18 @@ +from datetime import datetime +from typing import Dict, Optional + +from sqlalchemy import Column +from sqlalchemy.dialects.postgresql import JSONB +from sqlmodel import Field + +from hemera.common.models import HemeraModel + + +class MetricsPersistence(HemeraModel): + __tablename__ = "metrics_persistence" + + instance: str = Column(primary_key=True) + metrics: Optional[Dict] = Field(default=None, sa_column=Column(JSONB)) + + create_time: datetime = Field(default_factory=datetime.utcnow) + update_time: datetime = Field(default_factory=datetime.utcnow) diff --git a/hemera/common/models/utils/prices.py b/hemera/common/models/utils/prices.py new file mode 100644 index 000000000..6559f8eb2 --- /dev/null +++ b/hemera/common/models/utils/prices.py @@ -0,0 +1,72 @@ +from datetime import datetime +from decimal import Decimal +from typing import Optional + +from sqlalchemy.sql import text +from sqlmodel import Field, Index + +from hemera.common.models import HemeraModel + + +class TokenPrices(HemeraModel, table=True): + """Historical token prices model + + Primary keys: + - symbol: Token symbol + - timestamp: Price timestamp + """ + + __tablename__ = "token_prices" + + # Primary key fields + symbol: str = Field(primary_key=True) + timestamp: datetime = Field(primary_key=True) + + # Price field + price: Optional[Decimal] = Field(default=None, description="Token price at the timestamp") + + __table_args__ = ( + Index("token_prices_symbol_timestamp_index", text("symbol ASC, timestamp DESC")), + Index("token_prices_timestamp_index", text("timestamp DESC")), + ) + + +class TokenHourlyPrices(HemeraModel, table=True): + """Historical hourly token prices model + + Primary keys: + - symbol: Token symbol + - timestamp: Price timestamp + """ + + __tablename__ = "token_hourly_prices" + + # Primary key fields + symbol: str = Field(primary_key=True) + timestamp: datetime = 
Field(primary_key=True) + + # Price field + price: Optional[Decimal] = Field(default=None, description="Token price at the timestamp") + + __table_args__ = ( + Index("token_hourly_prices_symbol_timestamp_index", text("symbol ASC, timestamp DESC")), + Index("token_hourly_prices_timestamp_index", text("timestamp DESC")), + ) + + +class CoinPrices(HemeraModel, table=True): + """Daily coin prices model + + Primary key: + - block_date: Price date from block + """ + + __tablename__ = "coin_prices" + + # Primary key field + block_date: datetime = Field(primary_key=True, description="Block date for the price") + + # Price field + price: Optional[Decimal] = Field(default=None, description="Coin price on the block date") + + __table_args__ = (Index("coin_prices_block_date_index", text("block_date DESC")),) diff --git a/hemera/common/models/utils/records.py b/hemera/common/models/utils/records.py new file mode 100644 index 000000000..b9345fc55 --- /dev/null +++ b/hemera/common/models/utils/records.py @@ -0,0 +1,69 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from sqlalchemy.dialects.postgresql import JSONB +from sqlmodel import Column, Field + +from hemera.common.models import HemeraModel + + +class SyncRecords(HemeraModel, table=True): + __tablename__ = "sync_records" + + # Primary key + mission_sign: str = Field(primary_key=True) + + # Fields + last_block_number: Optional[int] = Field(default=None) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + + __query_order__ = [update_time] + + +class FixRecords(HemeraModel, table=True): + __tablename__ = "fix_records" + + # Primary key + job_id: str = Field(primary_key=True) + + # Fields + last_fixed_block_number: Optional[int] = Field(default=None) + update_time: Optional[datetime] = Field(default_factory=datetime.utcnow) + + __query_order__ = [update_time] + + +class ExceptionRecords(HemeraModel, table=True): + __tablename__ = "exception_records" + + # Primary key + id: 
str = Field(primary_key=True) + + # Fields + block_number: int = Field(default=None) + dataclass: str = Field(default=None) + level: str = Field(default=None) + message_type: str = Field(default=None) + message: str = Field(default=None) + exception_env: Optional[Dict[str, Any]] = Field(default=None, sa_column=Column(JSONB)) + record_time: datetime = Field(default=None) + + __query_order__ = [record_time] + + +class FailureRecords(HemeraModel, table=True): + __tablename__ = "failure_records" + + # Primary key + record_id: int = Field(primary_key=True) + + # Fields + mission_sign: str = Field(default=None) + output_types: str = Field(default=None) + start_block_number: int = Field(default=None) + end_block_number: int = Field(default=None) + exception_stage: str = Field(default=None) + exception: Optional[Dict[str, Any]] = Field(default=None, sa_column=Column(JSONB)) + crash_time: datetime = Field(default=None) + + __query_order__ = [crash_time] diff --git a/hemera/common/models/utils/scheduled_metadata.py b/hemera/common/models/utils/scheduled_metadata.py new file mode 100644 index 000000000..762b7bcc1 --- /dev/null +++ b/hemera/common/models/utils/scheduled_metadata.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import Optional + +from sqlmodel import Field + +from hemera.common.models import HemeraModel, general_converter + + +class ScheduledMetadata(HemeraModel, table=True): + __tablename__ = "scheduled_metadata" + + # Primary key and basic fields + id: int = Field(primary_key=True) + dag_id: str = Field(primary_key=True) + execution_date: Optional[datetime] = Field(default=None) + last_data_timestamp: Optional[datetime] = Field(default=None) diff --git a/hemera/common/services/postgresql_service.py b/hemera/common/services/postgresql_service.py index 0b402ac33..89db90397 100644 --- a/hemera/common/services/postgresql_service.py +++ b/hemera/common/services/postgresql_service.py @@ -2,8 +2,6 @@ from contextlib import contextmanager from 
multiprocessing import current_process -from alembic import command -from alembic.config import Config from psycopg2.pool import ThreadedConnectionPool from sqlalchemy import create_engine from sqlalchemy.engine import Engine @@ -97,44 +95,7 @@ def __init__( self._initialized[(p_name, jdbc_url)] = True def _init_schema(self, script_location: str) -> None: - """ - Initialize database schema using Alembic migrations. - """ - alembic_cfg = Config() - alembic_cfg.set_main_option("script_location", script_location) - alembic_cfg.set_main_option("sqlalchemy.url", self.jdbc_url) - - # Configure logging - self._configure_alembic_logging(alembic_cfg) - - command.upgrade(alembic_cfg, self.db_version) - - def _configure_alembic_logging(self, config: Config) -> None: - """ - Configure Alembic logging settings. - """ - config.set_main_option("loggers", "root,sqlalchemy,alembic") - config.set_main_option("handlers", "console") - config.set_main_option("formatters", "generic") - - # Logger configurations - loggers = { - "root": ("WARN", "console", ""), - "sqlalchemy": ("WARN", "", "sqlalchemy.engine"), - "alembic": ("INFO", "", "alembic"), - } - - for logger, (level, handlers, qualname) in loggers.items(): - section = f"logger_{logger}" - config.set_section_option(section, "level", level) - config.set_section_option(section, "handlers", handlers) - config.set_section_option(section, "qualname", qualname) - - # Configure console handler - config.set_section_option("handler_console", "class", "StreamHandler") - config.set_section_option("handler_console", "args", "(sys.stderr,)") - config.set_section_option("handler_console", "level", "NOTSET") - config.set_section_option("handler_console", "formatter", "generic") + pass @contextmanager def session_scope(self) -> Session: diff --git a/hemera/common/utils/abi_code_utils.py b/hemera/common/utils/abi_code_utils.py index db697dc4a..10adb7fda 100644 --- a/hemera/common/utils/abi_code_utils.py +++ b/hemera/common/utils/abi_code_utils.py 
@@ -9,21 +9,13 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import eth_abi -from ens.utils import get_abi_output_types from eth_abi import abi from eth_abi.codec import ABICodec -from eth_typing import TypeStr -from eth_utils import encode_hex, to_hex +from eth_typing import ABIEvent, ABIFunction, TypeStr +from eth_utils import encode_hex, get_abi_input_types, get_abi_output_types, to_hex from hexbytes import HexBytes -from web3._utils.abi import ( - exclude_indexed_event_inputs, - get_abi_input_types, - get_indexed_event_inputs, - map_abi_data, - named_tree, -) +from web3._utils.abi import exclude_indexed_event_inputs, get_indexed_event_inputs, map_abi_data, named_tree from web3._utils.normalizers import BASE_RETURN_NORMALIZERS -from web3.types import ABIEvent, ABIFunction from hemera.common.utils.exception_control import FastShutdownError from hemera.common.utils.format_utils import bytes_to_hex_str, convert_bytes_to_hex, convert_dict, hex_str_to_bytes diff --git a/hemera/common/utils/config.py b/hemera/common/utils/config.py index bfdd1bbe7..e3679db5f 100644 --- a/hemera/common/utils/config.py +++ b/hemera/common/utils/config.py @@ -1,68 +1,5 @@ import logging import os -from configparser import ConfigParser - -from hemera.api.app.config import AppConfig - -_config_instance = None -_is_initialized = False - - -def get_config(): - global _config_instance - if _config_instance is None: - _config_instance = read_config() - return _config_instance - - -def set_config(config: AppConfig): - global _config_instance, _is_initialized - if not _is_initialized: - _config_instance = config - _is_initialized = True - else: - if config.env != "ut": - raise Exception("Configuration can only be set once.") - else: - _config_instance = config - - -def read_config(): - config = ConfigParser() - try: - if os.path.exists("api_config.conf"): - config.read("api_config.conf") - app_config = AppConfig.load_from_config_file(config) - elif 
os.path.exists("api_config.yaml"): - app_config = AppConfig.load_from_yaml_file("api_config.yaml") - else: - app_config = AppConfig() - app_config.update_from_env() - except Exception as e: - logging.error("Error initializing config") - exit() - - return app_config - - -def set_config_value(config_file, section, key, value): - config = ConfigParser() - config.read(config_file) - config[section][key] = value - - with open(config_file, "w") as configfile: - config.write(configfile) - - -def read_config_value(config_file, section, key): - config = ConfigParser() - try: - config.read(config_file) - value = config[section][key] - except Exception as e: - value = None - - return value def check_and_set_default_env(key: str, default_value: str): diff --git a/hemera/common/utils/db_utils.py b/hemera/common/utils/db_utils.py index 00e7c1dbb..631a5a842 100644 --- a/hemera/common/utils/db_utils.py +++ b/hemera/common/utils/db_utils.py @@ -4,10 +4,9 @@ from sqlalchemy import text -from hemera.common.models import HemeraModel, db -from hemera.common.models.blocks import Blocks +from hemera.common.models import HemeraModel +from hemera.common.models.base.blocks import Blocks from hemera.common.services.postgresql_service import PostgreSQLService -from hemera.common.utils.config import get_config from hemera.common.utils.format_utils import bytes_to_hex_str from hemera.indexer.domains import Domain, dict_to_dataclass from hemera.indexer.domains.block import Block @@ -15,8 +14,6 @@ from hemera.indexer.domains.receipt import Receipt from hemera.indexer.domains.transaction import Transaction -app_config = get_config() - def build_entities(model, columns): if columns == "*": diff --git a/hemera/common/utils/format_utils.py b/hemera/common/utils/format_utils.py index f474e3455..82c51f194 100644 --- a/hemera/common/utils/format_utils.py +++ b/hemera/common/utils/format_utils.py @@ -6,7 +6,7 @@ from sqlalchemy import Integer, Numeric -def bytes_to_hex_str(b: bytes) -> str: +def 
bytes_to_hex_str(b: bytes) -> Optional[str]: """ Converts a bytes object to a hexadecimal string with '0x' prefix. @@ -16,10 +16,12 @@ def bytes_to_hex_str(b: bytes) -> str: :return: A hexadecimal string representation of the input bytes. :rtype: str """ + if not b: + return None return "0x" + b.hex() -def hex_str_to_bytes(h: str) -> bytes: +def hex_str_to_bytes(h: str) -> Optional[bytes]: """ Converts a hexadecimal string to a bytes object. diff --git a/hemera/common/utils/web3_utils.py b/hemera/common/utils/web3_utils.py index df7323cf2..bea6b4323 100644 --- a/hemera/common/utils/web3_utils.py +++ b/hemera/common/utils/web3_utils.py @@ -1,10 +1,11 @@ import base64 import json import re +from typing import Optional import requests from web3 import Web3 -from web3.middleware import geth_poa_middleware +from web3.middleware import PythonicMiddleware ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" @@ -93,15 +94,10 @@ def build_web3(provider): - w3 = Web3(provider) - w3.middleware_onion.inject(geth_poa_middleware, layer=0) + w3 = Web3(provider, middleware=[PythonicMiddleware]) return w3 -def verify_0_address(address): - return set(address[2:]) == {"0"} - - def get_debug_trace_transaction(traces): def prune_delegates(trace): while ( @@ -173,6 +169,38 @@ def is_eth_transaction_hash(hash): return bool(re.fullmatch(pattern, hash)) +def valid_hash(value: str) -> Optional[str]: + """Check if the input string is a valid block hash. 
+ + Args: + value: Input string to validate + + Returns: + bool: True if the input is a valid block hash, False otherwise + + Examples: + >>> valid_hash("0x1234...") # 64 hex chars with 0x prefix + True + >>> valid_hash("1234...") # 64 hex chars without prefix + True + >>> valid_hash("0xabcd") # Too short + False + """ + # Remove 0x prefix if present + hex_value = value.lower().removeprefix("0x") + + # Check if the string has exactly 64 characters (32 bytes) + if len(hex_value) != 64: + return None + + # Check if all characters are valid hex digits + try: + int(hex_value, 16) + return "0x" + hex_value.lower() + except ValueError: + return None + + def to_checksum_address(address): return Web3.to_checksum_address(address) diff --git a/hemera/indexer/controller/reorg_controller.py b/hemera/indexer/controller/reorg_controller.py index 9fbdf65cc..c65f0f2b5 100644 --- a/hemera/indexer/controller/reorg_controller.py +++ b/hemera/indexer/controller/reorg_controller.py @@ -5,8 +5,8 @@ from sqlalchemy import and_, update from sqlalchemy.dialects.postgresql import insert -from hemera.common.models.blocks import Blocks -from hemera.common.models.fix_record import FixRecord +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.utils.records import FixRecords from hemera.common.utils.exception_control import HemeraBaseException from hemera.common.utils.format_utils import hex_str_to_bytes from hemera.common.utils.web3_utils import build_web3 @@ -143,7 +143,7 @@ def _do_fixing(self, fix_block, retry_errors=True): def submit_new_fixing_job(self, start_block_number, remain_process): session = self.db_service.get_service_session() - stmt = insert(FixRecord).values( + stmt = insert(FixRecords).values( { "start_block_number": start_block_number, "last_fixed_block_number": start_block_number + 1, @@ -162,7 +162,7 @@ def submit_new_fixing_job(self, start_block_number, remain_process): def update_job_info(self, job_id, job_info): session = 
self.db_service.get_service_session() try: - stmt = update(FixRecord).where(FixRecord.job_id == job_id).values(job_info) + stmt = update(FixRecords).where(FixRecords.job_id == job_id).values(job_info) session.execute(stmt) session.commit() finally: @@ -172,10 +172,10 @@ def check_job_runnable(self, job_id): runnable = False session = self.db_service.get_service_session() try: - running_cnt = session.query(FixRecord).filter(FixRecord.job_status == "running").count() + running_cnt = session.query(FixRecords).filter(FixRecords.job_status == "running").count() runnable = running_cnt == 0 if not runnable: - running_job = session.query(FixRecord).filter(FixRecord.job_status == "running").first() + running_job = session.query(FixRecords).filter(FixRecords.job_status == "running").first() runnable = running_job.job_id == job_id finally: session.close() @@ -186,9 +186,9 @@ def wake_up_next_job(self): job = None try: job = ( - session.query(FixRecord) - .filter(FixRecord.job_status != "completed") - .order_by(FixRecord.create_time) + session.query(FixRecords) + .filter(FixRecords.job_status != "completed") + .order_by(FixRecords.create_time) .first() ) except Exception as e: diff --git a/hemera/indexer/controller/scheduler/job_scheduler.py b/hemera/indexer/controller/scheduler/job_scheduler.py index 27dbd12fe..b1722cf29 100644 --- a/hemera/indexer/controller/scheduler/job_scheduler.py +++ b/hemera/indexer/controller/scheduler/job_scheduler.py @@ -104,7 +104,7 @@ def __init__( force_filter_mode=False, metrics=None, ): - import_submodules("hemera_udf") + # import_submodules("hemera_udf") self.logger = logging.getLogger(__name__) self.auto_reorg = auto_reorg self.batch_web3_provider = batch_web3_provider diff --git a/hemera/indexer/controller/scheduler/reorg_scheduler.py b/hemera/indexer/controller/scheduler/reorg_scheduler.py index ebe32704f..db7fdf82d 100644 --- a/hemera/indexer/controller/scheduler/reorg_scheduler.py +++ 
b/hemera/indexer/controller/scheduler/reorg_scheduler.py @@ -5,9 +5,8 @@ from pottery import RedisDict from redis.client import Redis -from hemera.common.models.tokens import Tokens +from hemera.common.models.token.tokens import Tokens from hemera.common.utils.format_utils import bytes_to_hex_str -from hemera.common.utils.module_loading import import_submodules from hemera.indexer.jobs import FilterTransactionDataJob from hemera.indexer.jobs.base_job import BaseExportJob, BaseJob, ExtensionJob from hemera.indexer.jobs.export_blocks_job import ExportBlocksJob diff --git a/hemera/indexer/domains/coin_balance.py b/hemera/indexer/domains/coin_balance.py index 07e37f7c4..c47064da0 100644 --- a/hemera/indexer/domains/coin_balance.py +++ b/hemera/indexer/domains/coin_balance.py @@ -12,3 +12,14 @@ class CoinBalance(Domain): def __init__(self, coin_balance: dict): self.dict_to_entity(coin_balance) + + +@dataclass +class CurrentCoinBalance(Domain): + address: str + balance: int + block_number: int + block_timestamp: int + + def __init__(self, coin_balance: dict): + self.dict_to_entity(coin_balance) diff --git a/hemera/indexer/domains/current_token_balance.py b/hemera/indexer/domains/current_token_balance.py index 4c8c0e4b7..b56148532 100644 --- a/hemera/indexer/domains/current_token_balance.py +++ b/hemera/indexer/domains/current_token_balance.py @@ -6,8 +6,6 @@ @dataclass class CurrentTokenBalance(Domain): address: str - token_id: int - token_type: str token_address: str balance: int block_number: int diff --git a/hemera/indexer/domains/current_token_id_balance.py b/hemera/indexer/domains/current_token_id_balance.py new file mode 100644 index 000000000..2b7086268 --- /dev/null +++ b/hemera/indexer/domains/current_token_id_balance.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass + +from hemera.indexer.domains import Domain + + +@dataclass +class CurrentTokenIdBalance(Domain): + address: str + token_id: int + token_address: str + balance: int + block_number: int + 
block_timestamp: int diff --git a/hemera/indexer/domains/token_balance.py b/hemera/indexer/domains/token_balance.py index b1dbd1444..4f469c9a6 100644 --- a/hemera/indexer/domains/token_balance.py +++ b/hemera/indexer/domains/token_balance.py @@ -6,8 +6,6 @@ @dataclass class TokenBalance(Domain): address: str - token_id: int - token_type: str token_address: str balance: int block_number: int diff --git a/hemera/indexer/domains/token_id_balance.py b/hemera/indexer/domains/token_id_balance.py new file mode 100644 index 000000000..d8ca94eff --- /dev/null +++ b/hemera/indexer/domains/token_id_balance.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass + +from hemera.indexer.domains import Domain + + +@dataclass +class TokenIdBalance(Domain): + address: str + token_id: int + token_address: str + balance: int + block_number: int + block_timestamp: int diff --git a/hemera/indexer/domains/transaction_trace_json.py b/hemera/indexer/domains/transaction_trace_json.py new file mode 100644 index 000000000..d93c65c34 --- /dev/null +++ b/hemera/indexer/domains/transaction_trace_json.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/3/4 14:35 +# @Author ideal93 +# @File transaction_trace_json.py.py +# @Brief + +from dataclasses import dataclass +from typing import Any + +from hemera.indexer.domains import Domain + + +@dataclass +class TransactionTraceJson(Domain): + transaction_hash: str + block_timestamp: int + block_number: int + block_hash: str + data: dict[str, Any] + + @staticmethod + def from_rpc(trace_dict: dict): + return TransactionTraceJson( + block_number=trace_dict["block_number"], + block_hash=trace_dict["block_hash"], + block_timestamp=trace_dict["block_timestamp"], + transaction_hash=trace_dict["txHash"], + data=trace_dict, + ) diff --git a/hemera/indexer/exporters/postgres_item_exporter.py b/hemera/indexer/exporters/postgres_item_exporter.py index a8c9b7d94..a98cc14aa 100644 --- a/hemera/indexer/exporters/postgres_item_exporter.py 
+++ b/hemera/indexer/exporters/postgres_item_exporter.py @@ -74,7 +74,6 @@ def export_items(self, items, **kwargs): update_strategy = pg_config["update_strategy"] converter = pg_config["converter"] - # Initialize sub-progress bar for current table # Initialize sub-progress bar for current table self.sub_progress = TqdmExtraFormat( total=len(item_group), @@ -122,7 +121,7 @@ def sql_insert_statement(model: Type[HemeraModel], do_update: bool, columns, whe if do_update: insert_stmt = "INSERT INTO {}.{} ({}) VALUES %s ON CONFLICT ({}) DO UPDATE SET {}".format( - model.schema(), + model.metadata.schema, model.__tablename__, ", ".join(columns), ", ".join(pk_list), @@ -132,7 +131,7 @@ def sql_insert_statement(model: Type[HemeraModel], do_update: bool, columns, whe insert_stmt += " WHERE {}".format(where_clause) else: insert_stmt = "INSERT INTO {}.{} ({}) VALUES %s ON CONFLICT DO NOTHING ".format( - model.schema(), + model.metadata.schema, model.__tablename__, ", ".join(columns), ) diff --git a/hemera/indexer/jobs/__init__.py b/hemera/indexer/jobs/__init__.py index afb07829b..7aa6beecf 100644 --- a/hemera/indexer/jobs/__init__.py +++ b/hemera/indexer/jobs/__init__.py @@ -18,8 +18,8 @@ from hemera.indexer.jobs.export_coin_balances_job import ExportCoinBalancesJob from hemera.indexer.jobs.export_contracts_from_transaction_job import ExportContractsFromTransactionJob from hemera.indexer.jobs.export_contracts_job import ExportContractsJob +from hemera.indexer.jobs.export_nft_infos_job import ExportTokenIdInfosJob from hemera.indexer.jobs.export_token_balances_job import ExportTokenBalancesJob -from hemera.indexer.jobs.export_token_id_infos_job import ExportTokenIdInfosJob from hemera.indexer.jobs.export_tokens_and_transfers_job import ExportTokensAndTransfersJob from hemera.indexer.jobs.export_traces_job import ExportTracesJob from hemera.indexer.jobs.export_transactions_and_logs_job import ExportTransactionsAndLogsJob diff --git 
a/hemera/indexer/jobs/check_block_consensus_job.py b/hemera/indexer/jobs/check_block_consensus_job.py index 563ec3ff1..ef443ce50 100644 --- a/hemera/indexer/jobs/check_block_consensus_job.py +++ b/hemera/indexer/jobs/check_block_consensus_job.py @@ -4,7 +4,7 @@ from sqlalchemy import and_ -from hemera.common.models.blocks import Blocks +from hemera.common.models.base.blocks import Blocks from hemera.common.utils.format_utils import as_dict from hemera.indexer.domains import dict_to_dataclass from hemera.indexer.domains.block import Block diff --git a/hemera/indexer/jobs/export_token_id_infos_job.py b/hemera/indexer/jobs/export_nft_infos_job.py similarity index 100% rename from hemera/indexer/jobs/export_token_id_infos_job.py rename to hemera/indexer/jobs/export_nft_infos_job.py diff --git a/hemera/indexer/jobs/export_token_balances_job.py b/hemera/indexer/jobs/export_token_balances_job.py index 609355a85..0d297fba5 100644 --- a/hemera/indexer/jobs/export_token_balances_job.py +++ b/hemera/indexer/jobs/export_token_balances_job.py @@ -13,8 +13,8 @@ from hemera.indexer.domains.token_transfer import ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor from hemera.indexer.jobs.base_job import BaseExportJob -from hemera.indexer.utils.abi import pad_address, uint256_to_bytes -from hemera.indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION, ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION +from hemera.indexer.utils.abi import pad_address +from hemera.indexer.utils.abi_setting import ERC20_BALANCE_OF_FUNCTION from hemera.indexer.utils.collection_utils import distinct_collections_by_group from hemera.indexer.utils.exception_recorder import ExceptionRecorder from hemera.indexer.utils.multicall_hemera.util import calculate_execution_time @@ -28,8 +28,6 @@ class TokenBalanceParam: address: str token_address: str - token_id: Optional[int] - token_type: str block_number: int block_timestamp: int @@ 
-39,7 +37,7 @@ class TokenBalanceParam: # Exports token balance class ExportTokenBalancesJob(BaseExportJob): - dependency_types = [ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer] + dependency_types = [ERC20TokenTransfer, ERC721TokenTransfer] output_types = [TokenBalance, CurrentTokenBalance, MarkBalanceToken] able_to_reorg = True @@ -57,7 +55,7 @@ def __init__(self, **kwargs): @calculate_execution_time def _collect(self, **kwargs): - token_transfers = self._collect_all_token_transfers() + token_transfers = self._collect_erc20_and_erc721_token_transfers() parameters = self.extract_token_parameters(token_transfers) self._collect_batch(parameters) @@ -97,8 +95,6 @@ def _process(self, **kwargs): [ CurrentTokenBalance( address=token_balance.address, - token_id=token_balance.token_id, - token_type=token_balance.token_type, token_address=token_balance.token_address, balance=token_balance.balance, block_number=token_balance.block_number, @@ -106,13 +102,13 @@ def _process(self, **kwargs): ) for token_balance in self._data_buff[TokenBalance.type()] ], - group_by=["token_address", "address", "token_id"], + group_by=["token_address", "address"], max_key="block_number", ) ) @calculate_execution_time - def _collect_all_token_transfers(self): + def _collect_erc20_and_erc721_token_transfers(self): token_transfers = [] erc20_tokens = set() if ERC20TokenTransfer.type() in self._data_buff: @@ -123,9 +119,6 @@ def _collect_all_token_transfers(self): if erc721_token_transfer.token_address not in erc20_tokens: token_transfers.append(erc721_token_transfer) - if ERC1155TokenTransfer.type() in self._data_buff: - token_transfers += self._data_buff[ERC1155TokenTransfer.type()] - return token_transfers @calculate_execution_time @@ -141,8 +134,6 @@ def extract_token_parameters( continue common_params = { "token_address": transfer.token_address, - "token_id": (transfer.token_id if isinstance(transfer, ERC1155TokenTransfer) else None), - "token_type": transfer.token_type, 
"block_number": transfer.block_number if block_number is None else block_number, "block_timestamp": transfer.block_timestamp, } @@ -156,12 +147,8 @@ def extract_token_parameters( { "address": parameter.address, "token_address": parameter.token_address, - "token_id": parameter.token_id, - "token_type": parameter.token_type, "param_to": parameter.token_address, - "param_data": encode_balance_abi_parameter( - parameter.address, parameter.token_type, parameter.token_id - ), + "param_data": encode_balance_abi_parameter(parameter.address), "param_number": parameter.block_number if block_number is None else block_number, "block_number": parameter.block_number if block_number is None else block_number, "block_timestamp": parameter.block_timestamp, @@ -171,17 +158,13 @@ def extract_token_parameters( return token_parameters -def encode_balance_abi_parameter(address, token_type, token_id): - if token_type == "ERC1155": - encoded_arguments = HexBytes(pad_address(address) + uint256_to_bytes(token_id)) - return to_hex(HexBytes(ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION.get_signature()) + encoded_arguments) - else: - encoded_arguments = HexBytes(pad_address(address)) - return to_hex(HexBytes(ERC20_BALANCE_OF_FUNCTION.get_signature()) + encoded_arguments) +def encode_balance_abi_parameter(address): + encoded_arguments = HexBytes(pad_address(address)) + return to_hex(HexBytes(ERC20_BALANCE_OF_FUNCTION.get_signature()) + encoded_arguments) def extract_token_parameters( - token_transfers: List[Union[ERC20TokenTransfer, ERC721TokenTransfer, ERC1155TokenTransfer]], + token_transfers: List[Union[ERC20TokenTransfer, ERC721TokenTransfer]], block_number: Union[Optional[int], str] = None, ): origin_parameters = set() @@ -189,8 +172,6 @@ def extract_token_parameters( for transfer in token_transfers: common_params = { "token_address": transfer.token_address, - "token_id": (transfer.token_id if isinstance(transfer, ERC1155TokenTransfer) else None), - "token_type": transfer.token_type, 
"block_number": transfer.block_number if block_number is None else block_number, "block_timestamp": transfer.block_timestamp, } @@ -204,10 +185,8 @@ def extract_token_parameters( { "address": parameter.address, "token_address": parameter.token_address, - "token_id": parameter.token_id, - "token_type": parameter.token_type, "param_to": parameter.token_address, - "param_data": encode_balance_abi_parameter(parameter.address, parameter.token_type, parameter.token_id), + "param_data": encode_balance_abi_parameter(parameter.address), "param_number": parameter.block_number if block_number is None else block_number, "block_number": parameter.block_number if block_number is None else block_number, "block_timestamp": parameter.block_timestamp, diff --git a/hemera/indexer/jobs/export_token_id_balances_job.py b/hemera/indexer/jobs/export_token_id_balances_job.py new file mode 100644 index 000000000..32c34ceae --- /dev/null +++ b/hemera/indexer/jobs/export_token_id_balances_job.py @@ -0,0 +1,196 @@ +import logging +from dataclasses import dataclass +from typing import List, Optional, Union + +from eth_utils import to_hex +from hexbytes import HexBytes + +from hemera.common.utils.web3_utils import ZERO_ADDRESS +from hemera.indexer.domains import dict_to_dataclass +from hemera.indexer.domains.current_token_id_balance import CurrentTokenIdBalance +from hemera.indexer.domains.token import MarkBalanceToken +from hemera.indexer.domains.token_id_balance import TokenIdBalance +from hemera.indexer.domains.token_transfer import ERC1155TokenTransfer +from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor +from hemera.indexer.jobs.base_job import BaseExportJob +from hemera.indexer.utils.abi import pad_address, uint256_to_bytes +from hemera.indexer.utils.abi_setting import ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION +from hemera.indexer.utils.collection_utils import distinct_collections_by_group +from hemera.indexer.utils.exception_recorder import ExceptionRecorder +from 
hemera.indexer.utils.multicall_hemera.util import calculate_execution_time +from hemera.indexer.utils.token_fetcher import TokenFetcher + +logger = logging.getLogger(__name__) +exception_recorder = ExceptionRecorder() + + +@dataclass(frozen=True) +class TokenIdBalanceParam: + address: str + token_address: str + token_id: int + block_number: int + block_timestamp: int + + +FAILURE_THRESHOLD = 100 + + +# Exports token balance +class ExportTokenIdBalancesJob(BaseExportJob): + dependency_types = [ERC1155TokenTransfer] + output_types = [TokenIdBalance, CurrentTokenIdBalance] + able_to_reorg = True + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + self._batch_work_executor = BatchWorkExecutor( + kwargs["batch_size"], + kwargs["max_workers"], + job_name=self.__class__.__name__, + ) + self._is_batch = kwargs["batch_size"] > 1 + self._is_multi_call = kwargs["multicall"] + self.token_fetcher = TokenFetcher(self._web3, kwargs) + + @calculate_execution_time + def _collect(self, **kwargs): + token_transfers = self._collect_erc1155_token_transfers() + parameters = self.extract_token_parameters(token_transfers) + self._collect_batch(parameters) + + @calculate_execution_time + def _collect_batch(self, parameters): + token_balances = self.token_fetcher.fetch_token_balance(parameters) + results = [] + tokens_set = set() + for tb in token_balances: + if tb["token_address"] in self.tokens: + key = "fail_balance_of_count" if tb["balance"] is None else "succeed_balance_of_count" + self.tokens[tb["token_address"]][key] += 1 + tokens_set.add(tb["token_address"]) + results.append(dict_to_dataclass(tb, TokenIdBalance)) + self._collect_items(TokenIdBalance.type(), results) + for tk in tokens_set: + if ( + self.tokens[tk]["fail_balance_of_count"] > FAILURE_THRESHOLD + and self.tokens[tk]["succeed_balance_of_count"] <= 0 + ): + self.tokens[tk]["no_balance_of"] = True + self._collect_item( + MarkBalanceToken.type(), + MarkBalanceToken( + address=tk, + 
no_balance_of=self.tokens[tk]["no_balance_of"], + fail_balance_of_count=self.tokens[tk]["fail_balance_of_count"], + succeed_balance_of_count=self.tokens[tk]["succeed_balance_of_count"], + ), + ) + + def _process(self, **kwargs): + if TokenIdBalance.type() in self._data_buff: + self._data_buff[TokenIdBalance.type()].sort(key=lambda x: (x.block_number, x.address)) + self._update_domains( + distinct_collections_by_group( + [ + CurrentTokenIdBalance( + address=token_balance.address, + token_id=token_balance.token_id, + token_address=token_balance.token_address, + balance=token_balance.balance, + block_number=token_balance.block_number, + block_timestamp=token_balance.block_timestamp, + ) + for token_balance in self._data_buff[TokenIdBalance.type()] + ], + group_by=["token_address", "address", "token_id"], + max_key="block_number", + ) + ) + + @calculate_execution_time + def _collect_erc1155_token_transfers(self): + token_transfers = [] + if ERC1155TokenTransfer.type() in self._data_buff: + token_transfers += self._data_buff[ERC1155TokenTransfer.type()] + + return token_transfers + + @calculate_execution_time + def extract_token_parameters( + self, + token_transfers: List[ERC1155TokenTransfer], + block_number: Union[Optional[int], str] = None, + ): + origin_parameters = set() + token_parameters = [] + for transfer in token_transfers: + if transfer.token_address in self.tokens and self.tokens[transfer.token_address]["no_balance_of"]: + continue + common_params = { + "token_address": transfer.token_address, + "token_id": transfer.token_id, + "block_number": transfer.block_number if block_number is None else block_number, + "block_timestamp": transfer.block_timestamp, + } + if transfer.from_address != ZERO_ADDRESS: + origin_parameters.add(TokenIdBalanceParam(address=transfer.from_address, **common_params)) + if transfer.to_address != ZERO_ADDRESS: + origin_parameters.add(TokenIdBalanceParam(address=transfer.to_address, **common_params)) + + for parameter in 
origin_parameters: + token_parameters.append( + { + "address": parameter.address, + "token_address": parameter.token_address, + "token_id": parameter.token_id, + "param_to": parameter.token_address, + "param_data": encode_token_id_balance_abi_parameter(parameter.address, parameter.token_id), + "param_number": parameter.block_number if block_number is None else block_number, + "block_number": parameter.block_number if block_number is None else block_number, + "block_timestamp": parameter.block_timestamp, + } + ) + + return token_parameters + + +def encode_token_id_balance_abi_parameter(address, token_id): + encoded_arguments = HexBytes(pad_address(address) + uint256_to_bytes(token_id)) + return to_hex(HexBytes(ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION.get_signature()) + encoded_arguments) + + +def extract_token_parameters( + token_transfers: List[ERC1155TokenTransfer], + block_number: Union[Optional[int], str] = None, +): + origin_parameters = set() + token_parameters = [] + for transfer in token_transfers: + common_params = { + "token_address": transfer.token_address, + "token_id": transfer.token_id, + "block_number": transfer.block_number if block_number is None else block_number, + "block_timestamp": transfer.block_timestamp, + } + if transfer.from_address != ZERO_ADDRESS: + origin_parameters.add(TokenIdBalanceParam(address=transfer.from_address, **common_params)) + if transfer.to_address != ZERO_ADDRESS: + origin_parameters.add(TokenIdBalanceParam(address=transfer.to_address, **common_params)) + + for parameter in origin_parameters: + token_parameters.append( + { + "address": parameter.address, + "token_address": parameter.token_address, + "token_id": parameter.token_id, + "param_to": parameter.token_address, + "param_data": encode_token_id_balance_abi_parameter(parameter.address, parameter.token_id), + "param_number": parameter.block_number if block_number is None else block_number, + "block_number": parameter.block_number if block_number is None else block_number, 
+ "block_timestamp": parameter.block_timestamp, + } + ) + + return token_parameters diff --git a/hemera/indexer/jobs/export_traces_job.py b/hemera/indexer/jobs/export_traces_job.py index 3580c5a1a..1e0291703 100644 --- a/hemera/indexer/jobs/export_traces_job.py +++ b/hemera/indexer/jobs/export_traces_job.py @@ -11,6 +11,7 @@ from hemera.indexer.domains.block import Block, UpdateBlockInternalCount from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction from hemera.indexer.domains.trace import Trace +from hemera.indexer.domains.transaction_trace_json import TransactionTraceJson from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor from hemera.indexer.jobs.base_job import BaseExportJob from hemera.indexer.utils.exception_recorder import ExceptionRecorder @@ -24,7 +25,7 @@ # Exports traces class ExportTracesJob(BaseExportJob): dependency_types = [Block] - output_types = [Trace, ContractInternalTransaction, UpdateBlockInternalCount] + output_types = [Trace, ContractInternalTransaction, UpdateBlockInternalCount, TransactionTraceJson] able_to_reorg = True def __init__(self, **kwargs): @@ -48,11 +49,13 @@ def _collect(self, **kwargs): self._batch_work_executor.wait() def _collect_batch(self, blocks): - traces = traces_rpc_requests( + traces, total_transaction_json_list = traces_rpc_requests( self._batch_web3_provider.make_request, [dataclass_to_dict(block) for block in blocks], self._is_batch, ) + for total_transaction_json in total_transaction_json_list: + self._collect_item(TransactionTraceJson.type(), TransactionTraceJson.from_rpc(total_transaction_json)) for trace in traces: trace_entity = Trace.from_rpc(trace) @@ -174,6 +177,7 @@ def traces_rpc_requests(make_requests, blocks: List[dict], is_batch): responses = [make_requests(params=orjson.dumps(trace_block_rpc[0]))] total_traces = [] + total_transaction_json = [] for block, response in zip_rpc_response(blocks, responses, index="number"): block_number = 
block["number"] transactions = block["transactions"] @@ -213,8 +217,20 @@ def traces_rpc_requests(make_requests, blocks: List[dict], is_batch): "transaction_traces": result, } trace_spliter = ExtractTraces() + for trace in geth_trace["transaction_traces"]: + total_transaction_json.append( + { + **trace, + **{ + "block_number": block_number, + "block_hash": block["hash"], + "block_timestamp": block["timestamp"], + }, + } + ) + traces = trace_spliter.geth_trace_to_traces(geth_trace) total_traces.extend(traces) - return total_traces + return total_traces, total_transaction_json diff --git a/hemera/indexer/jobs/source_job/pg_source_job.py b/hemera/indexer/jobs/source_job/pg_source_job.py index 492551920..b3249d73e 100644 --- a/hemera/indexer/jobs/source_job/pg_source_job.py +++ b/hemera/indexer/jobs/source_job/pg_source_job.py @@ -9,14 +9,14 @@ from sqlalchemy import and_, func, select from sqlalchemy.dialects.postgresql import BIGINT, INTEGER -from hemera.common.models.blocks import Blocks -from hemera.common.models.logs import Logs -from hemera.common.models.transactions import Transactions +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.logs import Logs +from hemera.common.models.base.transactions import Transactions from hemera.common.services.postgresql_service import PostgreSQLService from hemera.common.utils.db_utils import table_to_dataclass from hemera.common.utils.exception_control import FastShutdownError from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes -from hemera.indexer.domains import Domain, dict_to_dataclass +from hemera.indexer.domains import Domain from hemera.indexer.domains.block import Block from hemera.indexer.domains.log import Log from hemera.indexer.domains.receipt import Receipt diff --git a/hemera/indexer/utils/abi.py b/hemera/indexer/utils/abi.py index 4e982b7e5..3237ff33b 100644 --- a/hemera/indexer/utils/abi.py +++ b/hemera/indexer/utils/abi.py @@ -3,9 +3,7 @@ from 
eth_abi.codec import ABICodec from eth_abi.grammar import BasicType -from eth_abi.utils.numeric import ceil32 -from eth_abi.utils.padding import zpad, zpad_right -from eth_typing import ChecksumAddress, HexStr, TypeStr +from eth_typing import ABIEvent, ABIFunction, ChecksumAddress, HexStr, TypeStr from eth_utils import ( event_abi_to_log_topic, function_abi_to_4byte_selector, @@ -18,7 +16,6 @@ from web3 import Web3 from web3._utils.abi import build_strict_registry from web3._utils.normalizers import implicitly_identity, parse_basic_type_str -from web3.types import ABIEvent, ABIFunction from hemera.common.utils.format_utils import bytes_to_hex_str diff --git a/hemera/indexer/utils/exception_recorder.py b/hemera/indexer/utils/exception_recorder.py index df31946b3..45f37dc59 100644 --- a/hemera/indexer/utils/exception_recorder.py +++ b/hemera/indexer/utils/exception_recorder.py @@ -3,7 +3,7 @@ from sqlalchemy.dialects.postgresql import insert -from hemera.common.models.exception_records import ExceptionRecords +from hemera.common.models.utils.records import ExceptionRecords LOG_BUFFER_SIZE = 5000 diff --git a/hemera/indexer/utils/limit_reader.py b/hemera/indexer/utils/limit_reader.py index 6354e180e..87dd62178 100644 --- a/hemera/indexer/utils/limit_reader.py +++ b/hemera/indexer/utils/limit_reader.py @@ -1,6 +1,6 @@ from sqlalchemy import func -from hemera.common.models.blocks import Blocks +from hemera.common.models.base.blocks import Blocks from hemera.common.services.postgresql_service import PostgreSQLService from hemera.common.utils.exception_control import FastShutdownError from hemera.common.utils.web3_utils import build_web3 diff --git a/hemera/indexer/utils/metrics_persistence.py b/hemera/indexer/utils/metrics_persistence.py index 3d22d7e88..0e8b7c5c6 100644 --- a/hemera/indexer/utils/metrics_persistence.py +++ b/hemera/indexer/utils/metrics_persistence.py @@ -6,7 +6,7 @@ from sqlalchemy import func from sqlalchemy.dialects.postgresql import insert -from 
hemera.common.models.metrics_persistence import MetricsPersistence +from hemera.common.models.utils.metrics_persistence import MetricsPersistence from hemera.common.utils.file_utils import write_to_file logger = logging.getLogger(__name__) diff --git a/hemera/indexer/utils/provider.py b/hemera/indexer/utils/provider.py index a89565581..ef4db2f74 100644 --- a/hemera/indexer/utils/provider.py +++ b/hemera/indexer/utils/provider.py @@ -4,7 +4,7 @@ from urllib.parse import urlparse from web3 import HTTPProvider, IPCProvider -from web3._utils.request import make_post_request +from web3._utils.http_session_manager import HTTPSessionManager from web3._utils.threads import Timeout DEFAULT_TIMEOUT = 60 @@ -64,6 +64,7 @@ def make_request(self, method=None, params=None): class BatchHTTPProvider(HTTPProvider): + http_manager = HTTPSessionManager(100, 5) def make_request(self, method=None, params=None): self.logger.debug("Making request HTTP. URI: %s, Request: %s", self.endpoint_uri, params) @@ -71,7 +72,7 @@ def make_request(self, method=None, params=None): request_data = params.encode("utf-8") else: request_data = params - raw_response = make_post_request(self.endpoint_uri, request_data, **self.get_request_kwargs()) + raw_response = self.http_manager.make_post_request(self.endpoint_uri, request_data, **self.get_request_kwargs()) try: response = self.decode_rpc_response(raw_response) except JSONDecodeError: diff --git a/hemera/indexer/utils/reorg.py b/hemera/indexer/utils/reorg.py index f6e8f6f13..ebaae379f 100644 --- a/hemera/indexer/utils/reorg.py +++ b/hemera/indexer/utils/reorg.py @@ -4,8 +4,8 @@ from sqlalchemy import and_, func, insert, literal, select from hemera.common.models import HemeraModel -from hemera.common.models.blocks import Blocks -from hemera.common.models.fix_record import FixRecord +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.utils.records import FixRecords from hemera.common.services.postgresql_service import 
PostgreSQLService from hemera.common.utils.exception_control import RetriableError @@ -98,7 +98,7 @@ def check_reorg(service: PostgreSQLService, check_range: int = None): ) ) - insert_stmt = insert(FixRecord).from_select( + insert_stmt = insert(FixRecords).from_select( ["start_block_number", "last_fixed_block_number", "remain_process", "job_status"], select_stmt ) diff --git a/hemera/indexer/utils/sync_recorder.py b/hemera/indexer/utils/sync_recorder.py index 26f4621f2..27b461e6d 100644 --- a/hemera/indexer/utils/sync_recorder.py +++ b/hemera/indexer/utils/sync_recorder.py @@ -7,8 +7,7 @@ from sqlalchemy import func from sqlalchemy.dialects.postgresql import insert -from hemera.common.models.failure_records import FailureRecords -from hemera.common.models.sync_record import SyncRecord +from hemera.common.models.utils.records import FailureRecords, SyncRecords from hemera.common.utils.file_utils import smart_open, write_to_file logger = logging.getLogger(__name__) @@ -87,7 +86,7 @@ def set_last_synced_block(self, last_synced_block): update_time = func.to_timestamp(int(datetime.now(timezone.utc).timestamp())) try: conflict_args = { - "index_elements": [SyncRecord.mission_sign], + "index_elements": [SyncRecords.mission_sign], "set_": { "last_block_number": last_synced_block, "update_time": update_time, @@ -95,10 +94,10 @@ def set_last_synced_block(self, last_synced_block): } if ASYNC_SUBMIT or self.multi_mode: - conflict_args["where"] = SyncRecord.last_block_number <= last_synced_block + conflict_args["where"] = SyncRecords.last_block_number <= last_synced_block statement = ( - insert(SyncRecord) + insert(SyncRecords) .values( { "mission_sign": self.key, @@ -118,7 +117,7 @@ def set_last_synced_block(self, last_synced_block): def get_last_synced_block(self): session = self.service.get_service_session() try: - result = session.query(SyncRecord.last_block_number).filter(SyncRecord.mission_sign == self.key).scalar() + result = 
session.query(SyncRecords.last_block_number).filter(SyncRecords.mission_sign == self.key).scalar() except Exception as e: raise e finally: diff --git a/hemera/indexer/utils/token_fetcher.py b/hemera/indexer/utils/token_fetcher.py index 66be69667..9ddc65ef8 100644 --- a/hemera/indexer/utils/token_fetcher.py +++ b/hemera/indexer/utils/token_fetcher.py @@ -165,10 +165,9 @@ def _prepare_token_balance_parameters(self, tokens): cnt += 1 token, wal = row["token_address"], row["address"] - token_id = row["token_id"] - token_type = row["token_type"] + token_id = row.get("token_id") block_number = row["block_number"] - if token_type == "ERC1155" and token_id is not None: + if token_id is not None: construct_call = Call( target=token, function_abi=ERC1155_TOKEN_ID_BALANCE_OF_FUNCTION, @@ -204,8 +203,7 @@ def fetch_token_balance(self, tokens): return_data.append( { "address": item["address"].lower(), - "token_id": item["token_id"], - "token_type": item["token_type"], + "token_id": item.get("token_id"), "token_address": item["token_address"].lower(), "balance": balance, "block_number": item["block_number"], diff --git a/hemera/migrate.py b/hemera/migrate.py new file mode 100644 index 000000000..04cd4a471 --- /dev/null +++ b/hemera/migrate.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/3/3 18:36 +# @Author ideal93 +# @File migrate.py +# @Brief + +import psycopg2 + + +def migrate(db_config, migration_file, params=None): + """ + Applies a migration to a PostgreSQL database. + + Args: + db_config (dict): Dictionary containing database configuration parameters. + migration_file (str): Path to the SQL file containing the migration. + params (dict or tuple, optional): Parameters to inject into the SQL query. 
+ """ + conn = None + try: + # Connect to the PostgreSQL database + conn = psycopg2.connect(**db_config) + cursor = conn.cursor() + + # Read the migration SQL from file + with open(migration_file, "r") as file: + migration_sql = file.read() + + if params: + cursor.execute(migration_sql, params) + else: + cursor.execute(migration_sql) + + conn.commit() + print("Migration applied successfully.") + + except Exception as e: + print("Migration failed:", e) + finally: + if conn: + conn.close() diff --git a/hemera/migrations/V20250100__create_extensions.sql b/hemera/migrations/V20250100__create_extensions.sql new file mode 100644 index 000000000..36901e94e --- /dev/null +++ b/hemera/migrations/V20250100__create_extensions.sql @@ -0,0 +1,6 @@ +-- EXTENSIONS +CREATE EXTENSION IF NOT EXISTS postgres_fdw; + +CREATE SCHEMA IF NOT EXISTS partman; +CREATE EXTENSION IF NOT EXISTS pg_partman SCHEMA partman; +CREATE EXTENSION IF NOT EXISTS pgcrypto; \ No newline at end of file diff --git a/hemera/migrations/V20250101__create_base_tables.sql b/hemera/migrations/V20250101__create_base_tables.sql new file mode 100644 index 000000000..416ba34f8 --- /dev/null +++ b/hemera/migrations/V20250101__create_base_tables.sql @@ -0,0 +1,162 @@ +\set start_partition '2025-01-01' +\set partition_interval '1 month' + +-- blocks +CREATE TABLE IF NOT EXISTS blocks +( + hash BYTEA not null + primary key, + number BIGINT, + timestamp timestamp, + parent_hash BYTEA, + nonce BYTEA, + gas_limit NUMERIC(100), + gas_used NUMERIC(100), + base_fee_per_gas NUMERIC(100), + difficulty NUMERIC(38), + total_difficulty NUMERIC(38), + size BIGINT, + miner BYTEA, + sha3_uncles BYTEA, + transactions_root BYTEA, + transactions_count BIGINT, + state_root BYTEA, + receipts_root BYTEA, + extra_data BYTEA, + withdrawals_root BYTEA, + create_time TIMESTAMP default now(), + update_time TIMESTAMP default now(), + reorg BOOLEAN, + blob_gas_used NUMERIC(100), + excess_blob_gas NUMERIC(100), + traces_count BIGINT, + 
internal_transactions_count BIGINT +); + +CREATE INDEX IF NOT EXISTS blocks_number_index + ON blocks (number DESC); + +CREATE INDEX IF NOT EXISTS blocks_timestamp_index + ON blocks (TIMESTAMP DESC); + +CREATE UNIQUE INDEX IF NOT EXISTS blocks_hash_unique_when_not_reorg + ON blocks (hash) + WHERE (reorg = FALSE); + +CREATE UNIQUE index blocks_number_unique_when_not_reorg + ON blocks (number) + WHERE (reorg = FALSE); + +-- transactions +CREATE TABLE IF NOT EXISTS transactions +( + hash BYTEA NOT NULL, + transaction_index INTEGER, + from_address BYTEA, + to_address BYTEA, + value NUMERIC(100), + transaction_type INTEGER, + input BYTEA, + nonce NUMERIC(100), + block_hash BYTEA, + block_number BIGINT, + block_timestamp TIMESTAMP, + gas NUMERIC(100), + gas_price NUMERIC(100), + max_fee_per_gas NUMERIC(100), + max_priority_fee_per_gas NUMERIC(100), + receipt_root BYTEA, + receipt_status INTEGER, + receipt_gas_used NUMERIC(100), + receipt_cumulative_gas_used NUMERIC(100), + receipt_effective_gas_price NUMERIC(100), + receipt_l1_fee NUMERIC(100), + receipt_l1_fee_scalar NUMERIC(100, 18), + receipt_l1_gas_used NUMERIC(100), + receipt_l1_gas_price NUMERIC(100), + receipt_blob_gas_used NUMERIC(100), + receipt_blob_gas_price NUMERIC(100), + blob_versioned_hashes BYTEA[], + receipt_contract_address BYTEA, + exist_error BOOLEAN, + error TEXT, + revert_reason TEXT, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + method_id VARCHAR GENERATED ALWAYS AS (SUBSTR(((input)::character varying)::text, 3, 8)) STORED, + PRIMARY KEY (hash, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + + +CREATE INDEX IF NOT EXISTS transactions_block_number_transaction_index + ON transactions (block_number DESC, transaction_index DESC); + +CREATE INDEX IF NOT EXISTS transactions_block_timestamp_index + ON transactions (block_timestamp DESC); + +SELECT partman.create_parent( + p_parent_table := 'public.transactions'::text, + p_control 
:= 'block_timestamp'::text, + p_type := 'range'::text, + p_interval := :'partition_interval'::text, + p_premake := 12::int, + p_automatic_maintenance := 'on'::text, + p_start_partition := :'start_partition'::text, + p_default_table := false + ); + +-- logs +CREATE TABLE IF NOT EXISTS logs +( + log_index INTEGER NOT NULL, + address BYTEA, + data BYTEA, + topic0 BYTEA, + topic1 BYTEA, + topic2 BYTEA, + topic3 BYTEA, + transaction_hash BYTEA NOT NULL, + transaction_index INTEGER, + block_number BIGINT, + block_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (transaction_hash, block_hash, log_index, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + +CREATE INDEX IF NOT EXISTS logs_address_block_number_log_index_index + ON logs (address ASC, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS logs_block_timestamp_index + ON logs (block_timestamp DESC); + +CREATE INDEX IF NOT EXISTS logs_address_topic_0_number_log_index_index + ON logs (address ASC, topic0 ASC, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS logs_block_number_log_index_index + ON logs (block_number DESC, log_index DESC); + +SELECT partman.create_parent( + p_parent_table := 'public.logs'::text, + p_control := 'block_timestamp'::text, + p_type := 'range'::text, + p_interval := :'partition_interval'::text, + p_premake := 12::INT, + p_automatic_maintenance := 'on'::text, + p_start_partition := :'start_partition'::text, + p_default_table := FALSE + ); + +CREATE TABLE block_ts_mapper +( + ts BIGSERIAL + PRIMARY KEY, + block_number BIGINT, + timestamp TIMESTAMP +); + +CREATE INDEX block_ts_mapper_block_number_idx + ON block_ts_mapper (block_number DESC); diff --git a/hemera/migrations/V20250102__create_token_tables.sql b/hemera/migrations/V20250102__create_token_tables.sql new file mode 100644 index 000000000..911d5767e --- /dev/null +++ 
b/hemera/migrations/V20250102__create_token_tables.sql @@ -0,0 +1,380 @@ +\set start_partition '2025-01-01' +\set partition_interval '1 month' + +-- tokens +CREATE TABLE IF NOT EXISTS tokens +( + address BYTEA NOT NULL + PRIMARY KEY, + name VARCHAR, + symbol VARCHAR, + total_supply NUMERIC(100), + decimals NUMERIC(100), + token_type VARCHAR, + holder_count INTEGER, + transfer_count INTEGER, + icon_url VARCHAR, + urls JSONB, + volume_24h NUMERIC(38, 2), + price NUMERIC(38, 6), + previous_price NUMERIC(38, 6), + market_cap NUMERIC(38, 2), + on_chain_market_cap NUMERIC(38, 2), + is_verified BOOLEAN, + cmc_id INTEGER, + cmc_slug VARCHAR, + gecko_id VARCHAR, + description VARCHAR, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + block_number BIGINT, + no_balance_of BOOLEAN DEFAULT FALSE, + fail_balance_of_count INTEGER DEFAULT 0, + no_total_supply BOOLEAN DEFAULT FALSE, + fail_total_supply_count INTEGER DEFAULT 0, + tags CHARACTER VARYING[], + succeed_balance_of_count INTEGER DEFAULT 0 +); + +CREATE INDEX IF NOT EXISTS tokens_type_index + ON tokens (token_type); + +CREATE INDEX IF NOT EXISTS tokens_type_holders_index + ON tokens (token_type ASC, holder_count DESC); + +CREATE INDEX IF NOT EXISTS tokens_type_on_chain_market_cap_index + ON tokens (token_type ASC, on_chain_market_cap DESC); + +-- address_token_balances +CREATE TABLE IF NOT EXISTS address_token_balances +( + address BYTEA NOT NULL, + token_address BYTEA NOT NULL, + balance NUMERIC(100), + block_number BIGINT NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (address, token_address, block_number, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.address_token_balances'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + 
P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +-- address_current_token_balances +CREATE TABLE IF NOT EXISTS address_current_token_balances +( + address BYTEA NOT NULL, + token_address BYTEA NOT NULL, + balance NUMERIC(100), + block_number BIGINT, + block_timestamp TIMESTAMP, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN, + CONSTRAINT address_current_token_balances_partition_pkey + PRIMARY KEY (address, token_address) +) + PARTITION BY RANGE (token_address); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p0 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x00') TO ('\x10'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p1 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x10') TO ('\x20'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p2 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x20') TO ('\x30'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p3 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x30') TO ('\x40'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p4 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x40') TO ('\x50'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p5 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x50') TO ('\x60'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p6 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x60') TO ('\x70'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p7 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x70') TO ('\x80'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p8 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x80') TO ('\x90'); + +CREATE TABLE IF NOT EXISTS 
address_current_token_balances_p9 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\x90') TO ('\xa0'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p10 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\xa0') TO ('\xb0'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p11 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\xb0') TO ('\xc0'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p12 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\xc0') TO ('\xd0'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p13 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\xd0') TO ('\xe0'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p14 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\xe0') TO ('\xf0'); + +CREATE TABLE IF NOT EXISTS address_current_token_balances_p15 + PARTITION OF address_current_token_balances + FOR VALUES FROM ('\xf0') TO ('\xffffffffffffffffffffffffffffffffffffffffff'); + +-- address_current_token_id_balances +CREATE TABLE IF NOT EXISTS address_current_token_id_balances +( + address BYTEA NOT NULL, + token_address BYTEA NOT NULL, + token_id NUMERIC(78) NOT NULL, + balance NUMERIC(100), + block_number BIGINT NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (address, token_address, token_id) +); + +-- address_token_id_balances +CREATE TABLE IF NOT EXISTS address_token_id_balances +( + address BYTEA NOT NULL, + token_address BYTEA NOT NULL, + token_id NUMERIC(78) NOT NULL, + balance NUMERIC(100), + block_number BIGINT NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (address, token_address, token_id, block_number, block_timestamp) +); + +-- 
token_transfers +-- erc20_token_transfers +CREATE TABLE IF NOT EXISTS erc20_token_transfers +( + transaction_hash BYTEA NOT NULL, + log_index INTEGER NOT NULL, + from_address BYTEA, + to_address BYTEA, + token_address BYTEA, + value NUMERIC(100), + block_number BIGINT, + block_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN, + CONSTRAINT erc20_token_transfers_pkey + PRIMARY KEY (transaction_hash, block_hash, log_index, block_timestamp) +) PARTITION by RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.erc20_token_transfers'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX IF NOT EXISTS erc20_token_transfers_block_number_index + ON erc20_token_transfers (block_number DESC); + +CREATE INDEX IF NOT EXISTS erc20_token_transfers_token_address_index + ON erc20_token_transfers (token_address DESC); + +-- erc721_token_transfers +CREATE TABLE IF NOT EXISTS erc721_token_transfers +( + transaction_hash BYTEA NOT NULL, + log_index INTEGER NOT NULL, + from_address BYTEA, + to_address BYTEA, + token_address BYTEA, + token_id NUMERIC(78), + block_number BIGINT, + block_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN, + CONSTRAINT erc721_token_transfers_pkey + PRIMARY KEY (transaction_hash, block_hash, log_index, block_timestamp) +) PARTITION by RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.erc721_token_transfers'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 
'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); +CREATE INDEX IF NOT EXISTS erc721_token_transfers_number_log_index + ON erc721_token_transfers (block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS erc721_token_transfers_token_address_id_index + ON erc721_token_transfers (token_address, token_id, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS erc721_token_transfers_token_address_number_log_index_index + ON erc721_token_transfers (token_address ASC, block_number DESC, log_index DESC); + +-- erc1155_token_transfers +CREATE TABLE IF NOT EXISTS erc1155_token_transfers +( + transaction_hash BYTEA NOT NULL, + log_index INTEGER NOT NULL, + from_address BYTEA, + to_address BYTEA, + token_address BYTEA, + token_id NUMERIC(78) NOT NULL, + value NUMERIC(100), + block_number BIGINT, + block_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN, + CONSTRAINT erc1155_token_transfers_pkey + PRIMARY KEY (transaction_hash, block_hash, log_index, block_timestamp, token_id) +) PARTITION by RANGE (block_timestamp); +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.erc1155_token_transfers'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); +CREATE INDEX IF NOT EXISTS erc1155_token_transfers_number_log_index + ON erc1155_token_transfers (block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS erc1155_token_transfers_token_address_id_index + ON erc1155_token_transfers (token_address, token_id, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS erc1155_token_transfers_token_address_number_log_index_index + ON erc1155_token_transfers (token_address ASC, block_number DESC, log_index 
DESC); + +-- nft +CREATE TABLE IF NOT EXISTS nft_details +( + token_address BYTEA NOT NULL, + token_id NUMERIC(100) NOT NULL, + token_supply NUMERIC(78), + token_owner BYTEA, + token_uri VARCHAR, + token_uri_info JSONB, + block_number BIGINT, + block_timestamp TIMESTAMP, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (token_address, token_id) +); + +CREATE INDEX IF NOT EXISTS nft_details_token_address_index + ON nft_details (token_address DESC, token_id); + +CREATE INDEX IF NOT EXISTS nft_details_address_index + ON nft_details (token_owner DESC, token_id ASC); + +CREATE TABLE IF NOT EXISTS nft_id_changes +( + token_address BYTEA NOT NULL, + token_id NUMERIC(100) NOT NULL, + token_owner BYTEA, + block_number BIGINT NOT NULL, + block_timestamp TIMESTAMP, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (token_address, token_id, block_number) +); + +CREATE INDEX IF NOT EXISTS nft_id_number_desc_index + ON nft_id_changes (token_address, token_id, block_number DESC); + +-- nft_transfers +CREATE TABLE IF NOT EXISTS nft_transfers ( + transaction_hash BYTEA NOT NULL, + block_hash BYTEA NOT NULL, + log_index INTEGER NOT NULL, + token_id NUMERIC(100) NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + block_number INTEGER NOT NULL, + + from_address BYTEA, + to_address BYTEA, + token_address BYTEA, + value NUMERIC(100), + + create_time TIMESTAMP DEFAULT now(), + update_time TIMESTAMP DEFAULT now(), + reorg BOOLEAN DEFAULT FALSE, + + PRIMARY KEY ( + transaction_hash, + block_hash, + log_index, + token_id, + block_timestamp, + block_number + ) +) PARTITION by RANGE (block_timestamp); +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.nft_transfers'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 
'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX idx_nft_transfers_block_log + ON nft_transfers (block_number DESC, log_index DESC); + +CREATE INDEX idx_nft_transfers_token_time + ON nft_transfers (token_address, block_number DESC, log_index DESC); + +CREATE INDEX idx_nft_transfers_token_id + ON nft_transfers (token_address, token_id, block_number DESC, log_index DESC); diff --git a/hemera/migrations/V20250103__create_trace_tables.sql b/hemera/migrations/V20250103__create_trace_tables.sql new file mode 100644 index 000000000..b64016a4e --- /dev/null +++ b/hemera/migrations/V20250103__create_trace_tables.sql @@ -0,0 +1,170 @@ +\set start_partition '2025-01-01' +\set partition_interval '1 month' + +-- traces +CREATE TABLE IF NOT EXISTS traces +( + trace_id TEXT NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + from_address BYTEA, + to_address BYTEA, + value NUMERIC(100), + input BYTEA, + output BYTEA, + trace_type VARCHAR, + call_type VARCHAR, + gas NUMERIC(100), + gas_used NUMERIC(100), + subtraces INTEGER, + trace_address INTEGER[], + error VARCHAR, + status INTEGER, + block_number BIGINT, + block_hash BYTEA, + transaction_index INTEGER, + transaction_hash BYTEA, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (trace_id, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.traces'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX IF NOT EXISTS traces_transaction_hash_index + ON traces (transaction_hash); + +CREATE INDEX IF NOT EXISTS traces_block_number_index + ON traces (block_number DESC); + +-- contract_internal_transactions +CREATE TABLE 
IF NOT EXISTS contract_internal_transactions +( + trace_id TEXT NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + from_address BYTEA, + to_address BYTEA, + value NUMERIC(100), + input BYTEA, + output BYTEA, + trace_type VARCHAR, + call_type VARCHAR, + gas NUMERIC(100), + gas_used NUMERIC(100), + subtraces INTEGER, + trace_address INTEGER[], + error VARCHAR, + status INTEGER, + block_number BIGINT, + block_hash BYTEA, + transaction_index INTEGER, + transaction_hash BYTEA, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (trace_id, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.contract_internal_transactions'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX IF NOT EXISTS contract_internal_transactions_transaction_hash_index + ON contract_internal_transactions (transaction_hash); + +CREATE INDEX IF NOT EXISTS contract_internal_transactions_block_number_index + ON contract_internal_transactions (block_number DESC); + +-- transaction_trace_json +CREATE TABLE IF NOT EXISTS transaction_trace_json +( + transaction_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + block_number BIGINT, + block_hash BYTEA, + data JSONB, + PRIMARY KEY (transaction_hash, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.transaction_trace_json'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +-- address_coin_balances +CREATE TABLE 
IF NOT EXISTS address_coin_balances +( + address BYTEA NOT NULL, + block_number BIGINT NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + balance NUMERIC(100), + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + PRIMARY KEY (address, block_number, block_timestamp) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.address_coin_balances'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE +); + +-- contracts +CREATE TABLE IF NOT EXISTS contracts +( + address BYTEA NOT NULL + PRIMARY KEY, + name VARCHAR, + contract_creator BYTEA, + creation_code BYTEA, + deployed_code BYTEA, + block_number BIGINT, + block_hash BYTEA, + block_timestamp TIMESTAMP, + transaction_index INTEGER, + transaction_hash BYTEA, + official_website VARCHAR, + description VARCHAR, + email VARCHAR, + social_list JSONB, + is_verified BOOLEAN, + is_proxy BOOLEAN, + implementation_contract BYTEA, + verified_implementation_contract BYTEA, + proxy_standard VARCHAR, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + reorg BOOLEAN DEFAULT FALSE, + deployed_code_hash VARCHAR GENERATED ALWAYS AS (ENCODE( + DIGEST(('0x'::TEXT || ENCODE(deployed_code, 'hex'::TEXT)), 'sha256'::TEXT), 'hex'::TEXT)) STORED, + transaction_from_address BYTEA, + bytecode VARCHAR GENERATED ALWAYS AS (('0x'::text || ENCODE(creation_code, 'hex'::TEXT))) STORED +); diff --git a/hemera/migrations/V20250104__create_address_index_tables.sql b/hemera/migrations/V20250104__create_address_index_tables.sql new file mode 100644 index 000000000..e1d8d7df6 --- /dev/null +++ b/hemera/migrations/V20250104__create_address_index_tables.sql @@ -0,0 +1,116 @@ +\set start_partition '2025-01-01' +\set 
partition_interval '1 month' + +-- address_transactions +CREATE TABLE IF NOT EXISTS address_transactions +( + address BYTEA NOT NULL, + block_number BIGINT NOT NULL, + transaction_index INTEGER NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + transaction_hash BYTEA, + block_hash BYTEA, + txn_type SMALLINT, + related_address BYTEA, + value NUMERIC(100), + transaction_fee NUMERIC(100), + receipt_status INTEGER, + method VARCHAR, + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + PRIMARY KEY (address, block_timestamp, block_number, transaction_index, block_hash) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.address_transactions'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX IF NOT EXISTS address_transactions_address_block_timestamp_block_number_t_idx + ON address_transactions (address, block_number DESC, transaction_index DESC); + +CREATE INDEX IF NOT EXISTS address_transactions_address_txn_type_block_timestamp_block_idx + ON address_transactions (address, txn_type, block_number DESC, transaction_index DESC); + +CREATE INDEX IF NOT EXISTS address_transactions_block_hash_idx + ON address_transactions (block_hash); + +-- address_token_transfers +CREATE TABLE IF NOT EXISTS address_token_transfers +( + address BYTEA NOT NULL, + block_number BIGINT NOT NULL, + log_index INTEGER NOT NULL, + transaction_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + block_hash BYTEA NOT NULL, + token_address BYTEA, + related_address BYTEA, + transfer_type INTEGER, + value NUMERIC(100), + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + PRIMARY KEY (address, block_timestamp, block_number, log_index, transaction_hash, block_hash) +) 
PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.address_token_transfers'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX IF NOT EXISTS address_token_transfers_wallet_address_token_address__idx + ON address_token_transfers (address, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS address_token_transfers_txn_type_block_timestamp_block_idx + ON address_token_transfers (address, transfer_type, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS address_token_transfers_block_hash_idx + ON address_token_transfers (block_hash); + + +CREATE TABLE IF NOT EXISTS address_nft_transfers +( + address BYTEA NOT NULL, + block_number BIGINT NOT NULL, + log_index INTEGER NOT NULL, + transaction_hash BYTEA NOT NULL, + block_timestamp TIMESTAMP NOT NULL, + token_id NUMERIC(100) NOT NULL, + block_hash BYTEA, + token_address BYTEA, + related_address BYTEA, + transfer_type INTEGER, + value NUMERIC(100), + create_time TIMESTAMP DEFAULT NOW(), + update_time TIMESTAMP DEFAULT NOW(), + PRIMARY KEY (address, block_number, log_index, transaction_hash, block_timestamp, token_id, block_hash) +) PARTITION BY RANGE (block_timestamp); + +SELECT partman.create_parent( + P_PARENT_TABLE := 'public.address_nft_transfers'::text, + P_CONTROL := 'block_timestamp'::text, + P_TYPE := 'range'::text, + P_INTERVAL := :'partition_interval'::text, + P_PREMAKE := 12::INT, + P_AUTOMATIC_MAINTENANCE := 'on'::text, + P_START_PARTITION := :'start_partition'::text, + P_DEFAULT_TABLE := FALSE + ); + +CREATE INDEX IF NOT EXISTS idx_address_nft_transfers_token_time + ON address_nft_transfers (address, block_timestamp DESC, block_number DESC, log_index DESC); + +CREATE INDEX IF NOT EXISTS 
address_nft_transfers_block_hash_idx + ON address_token_transfers (block_hash); \ No newline at end of file diff --git a/hemera/migrations/V20250105__create_stats_tables.sql b/hemera/migrations/V20250105__create_stats_tables.sql new file mode 100644 index 000000000..e69de29bb diff --git a/hemera/migrations/V20250106__create_utils_tables.sql b/hemera/migrations/V20250106__create_utils_tables.sql new file mode 100644 index 000000000..00ca58b1f --- /dev/null +++ b/hemera/migrations/V20250106__create_utils_tables.sql @@ -0,0 +1,23 @@ +create table sync_records +( + mission_sign varchar not null +primary key, +last_block_number bigint, +update_time timestamp default now() +); + + +create table failure_records +( + record_id bigserial + primary key, + mission_sign varchar, + output_types varchar, + start_block_number bigint, + end_block_number bigint, + exception_stage varchar, + exception json, + crash_time timestamp +); + + diff --git a/hemera/migrations/env.py b/hemera/migrations/env.py deleted file mode 100644 index 4383b6d54..000000000 --- a/hemera/migrations/env.py +++ /dev/null @@ -1,161 +0,0 @@ -import re - -from alembic import context -from alembic.autogenerate import rewriter -from alembic.operations import ops -from sqlalchemy import engine_from_config, pool -from sqlalchemy.sql.schema import SchemaItem - -from hemera.common.models import db -from hemera.common.utils.module_loading import import_submodules - -# Make sure everything is imported so that alembic can find it all -# import_all_models() -import_submodules("hemera.common.models") - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. 
-# if config.config_file_name is not None: -# fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = db.metadata - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - -IGNORE_DB_TABLE = [] -PARTITION_TABLES = [] - - -def table_able_to_track(**kwargs) -> bool: - name = kwargs["name"] - reflected = kwargs["reflected"] - if reflected: - return not any(name.startswith(table) for table in IGNORE_DB_TABLE) - return True - - -def do_not_track_partition_table(**kwargs) -> bool: - name = kwargs["name"] - reflected = kwargs["reflected"] - object_type = kwargs["object_type"] - - if reflected and object_type == "table": - partition_patterns = [ - re.compile(r"^(.+)_partition_\d{6}$"), - re.compile(r"^(.+)_(\d{4})_(\d{2})$"), - re.compile(r"^(.+)_p(\d{4})_(\d{2})$"), - re.compile(r"^(.+)_p(\d{1,2})$"), - re.compile(r"^(.+)_default$"), - ] - for pattern in partition_patterns: - match = pattern.match(name) - - if match: - return False - return True - - -tracking_list = [table_able_to_track, do_not_track_partition_table] - -writer = rewriter.Rewriter() - - -@writer.rewrites(ops.CreateTableOp) -def rewrite_create_table(context, revision, op): - op.if_not_exists = True - return op - - -@writer.rewrites(ops.CreateIndexOp) -def rewrite_create_index(context, revision, op): - op.if_not_exists = True - return op - - -@writer.rewrites(ops.DropTableOp) -def rewrite_drop_table(context, revision, op): - op.if_exists = True - return op - - -@writer.rewrites(ops.DropIndexOp) -def rewrite_drop_index(context, revision, op): - op.if_exists = True - return op - - -def custom_table_tracking( - obj: SchemaItem, name: str, object_type: str, reflected: bool, compare_to: SchemaItem -) -> bool: - for tracking in tracking_list: - if not 
tracking(obj=obj, name=name, object_type=object_type, reflected=reflected, compare_to=compare_to): - return False - return True - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - include_object=custom_table_tracking, - process_revision_directives=writer, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata, - include_object=custom_table_tracking, - process_revision_directives=writer, - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/hemera/migrations/isolate/20241105_add_address_index_and_stats.py b/hemera/migrations/isolate/20241105_add_address_index_and_stats.py deleted file mode 100644 index 76159f6a6..000000000 --- a/hemera/migrations/isolate/20241105_add_address_index_and_stats.py +++ /dev/null @@ -1,543 +0,0 @@ -"""add_address_index_and_stats - -Revision ID: 872094559593 -Revises: bc23aa19668e -Create Date: 2024-11-05 13:34:30.692977 - -""" - -from typing import Sequence, Union - -import 
sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "872094559593" -down_revision: Union[str, None] = "bc23aa19668e" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "address_contract_operations", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("trace_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("contract_address", postgresql.BYTEA(), nullable=True), - sa.Column("trace_id", sa.TEXT(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("creation_code", postgresql.BYTEA(), nullable=True), - sa.Column("deployed_code", postgresql.BYTEA(), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("trace_type", sa.TEXT(), nullable=True), - sa.Column("call_type", sa.TEXT(), nullable=True), - sa.Column("transaction_receipt_status", sa.INTEGER(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "trace_id", "block_number", "transaction_index", "block_timestamp"), - ) - op.create_index( - "address_contract_operations_address_block_tn_t_idx", - "address_contract_operations", - 
["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - ) - op.create_table( - "address_internal_transactions", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("trace_id", sa.TEXT(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("input_method", sa.TEXT(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("trace_type", sa.TEXT(), nullable=True), - sa.Column("call_type", sa.TEXT(), nullable=True), - sa.Column("txn_type", sa.SMALLINT(), nullable=True), - sa.Column("related_address", postgresql.BYTEA(), nullable=True), - sa.Column("transaction_receipt_status", sa.INTEGER(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "trace_id", "block_number", "transaction_index", "block_timestamp"), - ) - op.create_index( - "address_internal_transactions_address_nt_t_idx", - "address_internal_transactions", - ["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - ) - op.create_table( - "af_erc1155_token_holdings_current", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", 
sa.NUMERIC(precision=100), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "wallet_address"), - ) - op.create_index( - "af_erc1155_token_holdings_current_token_block_desc_index", - "af_erc1155_token_holdings_current", - [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "af_erc1155_token_holdings_current_wallet_block_desc_index", - "af_erc1155_token_holdings_current", - [sa.text("wallet_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "af_erc1155_token_holdings_hist", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True), - sa.PrimaryKeyConstraint( - "position_token_address", "token_id", "wallet_address", "block_timestamp", "block_number" - ), - ) - op.create_index( - "feature_erc1155_token_holding_token_block_desc_index", - "af_erc1155_token_holdings_hist", - [sa.text("position_token_address DESC"), 
sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "feature_erc1155_token_holding_token_wallet_block_desc_index", - "af_erc1155_token_holdings_hist", - [sa.text("position_token_address DESC"), sa.text("wallet_address DESC"), sa.text("block_number DESC")], - unique=False, - ) - op.create_table( - "af_index_daily_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_in_value", sa.BIGINT(), nullable=True), - sa.Column("transaction_out_value", sa.BIGINT(), nullable=True), - sa.Column("transaction_self_value", sa.BIGINT(), nullable=True), - sa.Column("transaction_in_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_out_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_self_fee", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_in_value", sa.BIGINT(), nullable=True), - sa.Column("internal_transaction_out_value", sa.BIGINT(), nullable=True), - sa.Column("internal_transaction_self_value", sa.BIGINT(), nullable=True), - sa.Column("erc20_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_in_count", sa.INTEGER(), nullable=True), - 
sa.Column("nft_721_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("contract_creation_count", sa.INTEGER(), nullable=True), - sa.Column("contract_destruction_count", sa.INTEGER(), nullable=True), - sa.Column("contract_operation_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("address", "block_date"), - ) - op.create_table( - "af_index_na_scheduled_metadata", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "af_index_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_in_value", sa.NUMERIC(), nullable=True), - sa.Column("transaction_out_value", sa.NUMERIC(), nullable=True), - sa.Column("transaction_self_value", sa.NUMERIC(), nullable=True), - sa.Column("transaction_in_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_out_fee", sa.NUMERIC(), nullable=True), 
- sa.Column("transaction_self_fee", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_in_value", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_out_value", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_self_value", sa.NUMERIC(), nullable=True), - sa.Column("erc20_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("contract_creation_count", sa.INTEGER(), nullable=True), - sa.Column("contract_destruction_count", sa.INTEGER(), nullable=True), - sa.Column("contract_operation_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_count", sa.INTEGER(), nullable=True), - 
sa.Column("tag", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - op.create_table( - "af_index_token_address_daily_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_holder_count", sa.INTEGER(), nullable=True), - sa.Column("token_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - op.create_table( - "af_index_token_address_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_holder_count", sa.INTEGER(), nullable=True), - sa.Column("token_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - op.create_table( - "af_stats_na_daily_addresses", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("active_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("receiver_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("sender_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("total_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("new_address_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "af_stats_na_daily_blocks", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("cnt", sa.BIGINT(), nullable=True), - sa.Column("avg_size", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_limit", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("total_gas_used", sa.BIGINT(), nullable=True), - sa.Column("avg_gas_used_percentage", sa.NUMERIC(), nullable=True), - sa.Column("avg_txn_cnt", sa.NUMERIC(), nullable=True), - sa.Column("total_cnt", sa.BIGINT(), nullable=True), - sa.Column("block_interval", sa.NUMERIC(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "af_stats_na_daily_bridge_transactions", - 
sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("deposit_cnt", sa.BIGINT(), nullable=True), - sa.Column("withdraw_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "af_stats_na_daily_tokens", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("erc20_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.Column("erc721_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.Column("erc1155_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "af_stats_na_daily_transactions", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("cnt", sa.BIGINT(), nullable=True), - sa.Column("total_cnt", sa.BIGINT(), nullable=True), - sa.Column("txn_error_cnt", sa.BIGINT(), nullable=True), - sa.Column("avg_transaction_fee", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("max_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("min_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "coin_prices", - sa.Column("block_date", sa.DateTime(), nullable=False), - sa.Column("price", sa.Numeric(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "scheduled_metadata", - sa.Column("id", 
sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", sa.DateTime(), nullable=True), - sa.Column("last_data_timestamp", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - op.drop_table("daily_wallet_addresses_aggregates") - op.drop_table("daily_addresses_aggregates") - op.drop_table("daily_blocks_aggregates") - op.drop_table("daily_tokens_aggregates") - op.drop_table("scheduled_token_count_metadata") - op.drop_table("scheduled_wallet_count_metadata") - op.drop_table("daily_contract_interacted_aggregates") - op.drop_table("daily_transactions_aggregates") - op.drop_table("statistics_wallet_addresses") - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "statistics_wallet_addresses", - sa.Column("address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_out_cnt", 
sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("deposit_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("tag", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("address", name="statistics_wallet_addresses_pkey"), - ) - op.create_table( - "daily_transactions_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("total_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("txn_error_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("avg_transaction_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - 
sa.Column("max_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_transactions_aggregates_pkey"), - ) - op.create_table( - "daily_contract_interacted_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("from_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("to_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("contract_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint( - "block_date", "from_address", "to_address", name="daily_contract_interacted_aggregates_pkey" - ), - ) - op.create_table( - "scheduled_wallet_count_metadata", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("dag_id", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("id", name="scheduled_wallet_count_metadata_pkey"), - ) - op.create_table( - "scheduled_token_count_metadata", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("dag_id", sa.VARCHAR(), autoincrement=False, 
nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("id", name="scheduled_token_count_metadata_pkey"), - ) - op.create_table( - "daily_tokens_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("erc20_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("erc721_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("erc1155_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_tokens_aggregates_pkey"), - ) - op.create_table( - "daily_blocks_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("avg_size", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_gas_limit", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("total_gas_used", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("avg_gas_used_percentage", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_txn_cnt", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("total_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("block_interval", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_blocks_aggregates_pkey"), - ) - op.create_table( - 
"daily_addresses_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("active_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("receiver_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("sender_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("total_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("new_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_addresses_aggregates_pkey"), - ) - op.create_table( - "daily_wallet_addresses_aggregates", - sa.Column("address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), autoincrement=False, 
nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "internal_txn_cnt", - sa.INTEGER(), - sa.Computed("(internal_txn_in_cnt + internal_txn_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "erc20_transfer_cnt", - sa.INTEGER(), - sa.Computed("(erc20_transfer_in_cnt + erc20_transfer_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "erc721_transfer_cnt", - sa.INTEGER(), - sa.Computed("(erc721_transfer_in_cnt + erc721_transfer_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "erc1155_transfer_cnt", - sa.INTEGER(), - sa.Computed("(erc1155_transfer_in_cnt + erc1155_transfer_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column("txn_self_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_in_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_out_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_self_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "txn_cnt", - sa.INTEGER(), - sa.Computed("((txn_in_cnt + txn_out_cnt) - txn_self_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column("deposit_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("gas_in_used", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l2_txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l1_txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("gas_out_used", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - 
sa.Column("l2_txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l1_txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("contract_deployed_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("from_address_unique_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("to_address_unique_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("address", "block_date", name="daily_wallet_addresses_aggregates_pkey"), - ) - op.drop_table("scheduled_metadata") - op.drop_table("coin_prices") - op.drop_table("af_stats_na_daily_transactions") - op.drop_table("af_stats_na_daily_tokens") - op.drop_table("af_stats_na_daily_bridge_transactions") - op.drop_table("af_stats_na_daily_blocks") - op.drop_table("af_stats_na_daily_addresses") - op.drop_table("af_index_token_address_stats") - op.drop_table("af_index_token_address_daily_stats") - op.drop_table("af_index_stats") - op.drop_table("af_index_na_scheduled_metadata") - op.drop_table("af_index_daily_stats") - op.drop_index( - "feature_erc1155_token_holding_token_wallet_block_desc_index", table_name="af_erc1155_token_holdings_hist" - ) - op.drop_index("feature_erc1155_token_holding_token_block_desc_index", table_name="af_erc1155_token_holdings_hist") - op.drop_table("af_erc1155_token_holdings_hist") - op.drop_index( - "af_erc1155_token_holdings_current_wallet_block_desc_index", table_name="af_erc1155_token_holdings_current" - ) - op.drop_index( - "af_erc1155_token_holdings_current_token_block_desc_index", table_name="af_erc1155_token_holdings_current" - ) - op.drop_table("af_erc1155_token_holdings_current") - op.drop_index("address_internal_transactions_address_nt_t_idx", table_name="address_internal_transactions") - op.drop_table("address_internal_transactions") 
- op.drop_index("address_contract_operations_address_block_tn_t_idx", table_name="address_contract_operations") - op.drop_table("address_contract_operations") - # ### end Alembic commands ### diff --git a/hemera/migrations/manual_versions/20240704_base_version.sql b/hemera/migrations/manual_versions/20240704_base_version.sql deleted file mode 100644 index f1fd80c7a..000000000 --- a/hemera/migrations/manual_versions/20240704_base_version.sql +++ /dev/null @@ -1,427 +0,0 @@ -BEGIN; - -CREATE TABLE IF NOT EXISTS alembic_version ( - version_num VARCHAR(32) NOT NULL, - CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num) -); - --- Running upgrade -> 5e4608933f64 - -CREATE TABLE IF NOT EXISTS address_coin_balances ( - address BYTEA NOT NULL, - balance NUMERIC(100), - block_number BIGINT NOT NULL, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address, block_number) -); - -CREATE TABLE IF NOT EXISTS address_token_balances ( - address BYTEA NOT NULL, - token_id NUMERIC(78), - token_type VARCHAR, - token_address BYTEA NOT NULL, - balance NUMERIC(100), - block_number BIGINT NOT NULL, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address, token_address, token_id, block_number) -); - -CREATE TABLE IF NOT EXISTS block_ts_mapper ( - ts BIGSERIAL NOT NULL, - block_number BIGINT, - timestamp TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (ts) -); - -CREATE INDEX block_ts_mapper_idx ON block_ts_mapper (block_number DESC); - -CREATE TABLE IF NOT EXISTS blocks ( - hash BYTEA NOT NULL, - number BIGINT, - timestamp TIMESTAMP WITHOUT TIME ZONE, - parent_hash BYTEA, - nonce BYTEA, - gas_limit NUMERIC(100), - gas_used NUMERIC(100), - base_fee_per_gas NUMERIC(100), - 
difficulty NUMERIC(38), - total_difficulty NUMERIC(38), - size BIGINT, - miner BYTEA, - sha3_uncles BYTEA, - transactions_root BYTEA, - transactions_count BIGINT, - state_root BYTEA, - receipts_root BYTEA, - extra_data BYTEA, - withdrawals_root BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (hash) -); - -CREATE INDEX blocks_number_index ON blocks (number DESC); - -CREATE INDEX blocks_timestamp_index ON blocks (timestamp DESC); - -CREATE TABLE IF NOT EXISTS contract_internal_transactions ( - trace_id VARCHAR NOT NULL, - from_address BYTEA, - to_address BYTEA, - value NUMERIC(100), - trace_type VARCHAR, - call_type VARCHAR, - gas NUMERIC(100), - gas_used NUMERIC(100), - trace_address INTEGER[], - error TEXT, - status INTEGER, - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - transaction_index INTEGER, - transaction_hash BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (trace_id) -); - -CREATE INDEX contract_internal_transactions_transaction_hash_idx ON contract_internal_transactions (transaction_hash); - -CREATE INDEX internal_transactions_address_number_transaction_index ON contract_internal_transactions (from_address, to_address, block_number DESC, transaction_index DESC); - -CREATE INDEX internal_transactions_block_timestamp_index ON contract_internal_transactions (block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS contracts ( - address BYTEA NOT NULL, - name VARCHAR, - contract_creator BYTEA, - creation_code BYTEA, - deployed_code BYTEA, - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - transaction_index INTEGER, - transaction_hash BYTEA, - official_website VARCHAR, - description VARCHAR, - email VARCHAR, - social_list JSONB, - is_verified 
BOOLEAN, - is_proxy BOOLEAN, - implementation_contract BYTEA, - verified_implementation_contract BYTEA, - proxy_standard VARCHAR, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address) -); - -CREATE TABLE IF NOT EXISTS erc1155_token_holders ( - token_address BYTEA NOT NULL, - wallet_address BYTEA NOT NULL, - token_id NUMERIC(78) NOT NULL, - balance_of NUMERIC(100), - latest_call_contract_time TIMESTAMP WITHOUT TIME ZONE, - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (token_address, wallet_address, token_id) -); - -CREATE INDEX erc1155_token_holders_token_address_balance_of_index ON erc1155_token_holders (token_address, balance_of DESC); - -CREATE TABLE IF NOT EXISTS erc1155_token_id_details ( - address BYTEA NOT NULL, - token_id NUMERIC(78) NOT NULL, - token_supply NUMERIC(78), - token_uri VARCHAR, - token_uri_info JSONB, - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address, token_id) -); - -CREATE INDEX erc1155_detail_desc_address_id_index ON erc1155_token_id_details (address DESC, token_id); - -CREATE TABLE IF NOT EXISTS erc1155_token_transfers ( - transaction_hash BYTEA NOT NULL, - log_index INTEGER NOT NULL, - from_address BYTEA, - to_address BYTEA, - token_address BYTEA, - token_id NUMERIC(78), - value NUMERIC(100), - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (transaction_hash, 
log_index) -); - -CREATE INDEX erc1155_token_transfers_address_block_number_log_index_index ON erc1155_token_transfers (token_address, from_address, to_address, block_number DESC, log_index DESC); - -CREATE INDEX erc1155_token_transfers_block_timestamp_index ON erc1155_token_transfers (block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS erc20_token_holders ( - token_address BYTEA NOT NULL, - wallet_address BYTEA NOT NULL, - balance_of NUMERIC(100), - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (token_address, wallet_address) -); - -CREATE INDEX erc20_token_holders_token_address_balance_of_index ON erc20_token_holders (token_address, balance_of DESC); - -CREATE TABLE IF NOT EXISTS erc20_token_transfers ( - transaction_hash BYTEA NOT NULL, - log_index INTEGER NOT NULL, - from_address BYTEA, - to_address BYTEA, - token_address BYTEA, - value NUMERIC(100), - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (transaction_hash, log_index) -); - -CREATE INDEX erc20_token_transfers_address_block_number_log_index_index ON erc20_token_transfers (token_address, from_address, to_address, block_number DESC, log_index DESC); - -CREATE INDEX erc20_token_transfers_block_timestamp_index ON erc20_token_transfers (block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS erc721_token_holders ( - token_address BYTEA NOT NULL, - wallet_address BYTEA NOT NULL, - balance_of NUMERIC(100), - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY 
(token_address, wallet_address) -); - -CREATE INDEX erc721_token_holders_token_address_balance_of_index ON erc721_token_holders (token_address, balance_of DESC); - -CREATE TABLE IF NOT EXISTS erc721_token_id_changes ( - address BYTEA NOT NULL, - token_id NUMERIC(78) NOT NULL, - token_owner BYTEA, - block_number BIGINT NOT NULL, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address, token_id, block_number) -); - -CREATE INDEX erc721_change_address_id_number_desc_index ON erc721_token_id_changes (address, token_id, block_number DESC); - -CREATE TABLE IF NOT EXISTS erc721_token_id_details ( - address BYTEA NOT NULL, - token_id NUMERIC(78) NOT NULL, - token_owner BYTEA, - token_uri VARCHAR, - token_uri_info JSONB, - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address, token_id) -); - -CREATE INDEX erc721_detail_owner_address_id_index ON erc721_token_id_details (token_owner DESC, address, token_id); - -CREATE TABLE IF NOT EXISTS erc721_token_transfers ( - transaction_hash BYTEA NOT NULL, - log_index INTEGER NOT NULL, - from_address BYTEA, - to_address BYTEA, - token_address BYTEA, - token_id NUMERIC(78), - token_uri JSONB, - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (transaction_hash, log_index) -); - -CREATE INDEX erc721_token_transfers_address_block_number_log_index_index ON erc721_token_transfers (token_address, from_address, to_address, block_number DESC, log_index DESC); - -CREATE INDEX 
erc721_token_transfers_block_timestamp_index ON erc721_token_transfers (block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS fix_record ( - job_id SERIAL NOT NULL, - start_block_number BIGINT, - last_fixed_block_number BIGINT, - remain_process INTEGER, - job_status VARCHAR, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (job_id) -); - -CREATE TABLE IF NOT EXISTS logs ( - log_index INTEGER NOT NULL, - address BYTEA, - data BYTEA, - topic0 BYTEA, - topic1 BYTEA, - topic2 BYTEA, - topic3 BYTEA, - transaction_hash BYTEA NOT NULL, - transaction_index INTEGER, - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (log_index, transaction_hash) -); - -CREATE INDEX logs_address_block_number_log_index_index ON logs (address, block_number DESC, log_index DESC); - -CREATE INDEX logs_block_timestamp_index ON logs (block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS sync_record ( - mission_type VARCHAR NOT NULL, - entity_types INTEGER NOT NULL, - last_block_number BIGINT, - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (mission_type, entity_types) -); - -CREATE TABLE IF NOT EXISTS tokens ( - address BYTEA NOT NULL, - name VARCHAR, - symbol VARCHAR, - total_supply NUMERIC(100), - decimals NUMERIC(100), - token_type VARCHAR, - holder_count INTEGER, - transfer_count INTEGER, - icon_url VARCHAR, - urls JSONB, - volume_24h NUMERIC(38, 2), - price NUMERIC(38, 6), - previous_price NUMERIC(38, 6), - market_cap NUMERIC(38, 2), - on_chain_market_cap NUMERIC(38, 2), - is_verified BOOLEAN, - cmc_id INTEGER, - cmc_slug VARCHAR, - gecko_id VARCHAR, - description VARCHAR, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY 
(address) -); - -CREATE INDEX tokens_symbol_index ON tokens (symbol); - -CREATE INDEX tokens_type_index ON tokens (token_type); - -CREATE TABLE IF NOT EXISTS traces ( - trace_id VARCHAR NOT NULL, - from_address BYTEA, - to_address BYTEA, - value NUMERIC(100), - input BYTEA, - output BYTEA, - trace_type VARCHAR, - call_type VARCHAR, - gas NUMERIC(100), - gas_used NUMERIC(100), - subtraces INTEGER, - trace_address INTEGER[], - error TEXT, - status INTEGER, - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - transaction_index INTEGER, - transaction_hash BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (trace_id) -); - -CREATE INDEX traces_address_block_timestamp_index ON traces (from_address, to_address, block_timestamp DESC); - -CREATE INDEX traces_transaction_hash_index ON traces (transaction_hash); - -CREATE TABLE IF NOT EXISTS transactions ( - hash BYTEA NOT NULL, - transaction_index INTEGER, - from_address BYTEA, - to_address BYTEA, - value NUMERIC(100), - transaction_type INTEGER, - input BYTEA, - nonce INTEGER, - block_hash BYTEA, - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - gas NUMERIC(100), - gas_price NUMERIC(100), - max_fee_per_gas NUMERIC(100), - max_priority_fee_per_gas NUMERIC(100), - receipt_root BYTEA, - receipt_status INTEGER, - receipt_gas_used NUMERIC(100), - receipt_cumulative_gas_used NUMERIC(100), - receipt_effective_gas_price NUMERIC(100), - receipt_l1_fee NUMERIC(100), - receipt_l1_fee_scalar NUMERIC(100, 18), - receipt_l1_gas_used NUMERIC(100), - receipt_l1_gas_price NUMERIC(100), - receipt_blob_gas_used NUMERIC(100), - receipt_blob_gas_price NUMERIC(100), - blob_versioned_hashes BYTEA[], - receipt_contract_address BYTEA, - exist_error BOOLEAN, - error TEXT, - revert_reason TEXT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP 
WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (hash) -); - -CREATE INDEX transactions_address_block_number_transaction_idx ON transactions (from_address, to_address, block_number DESC, transaction_index DESC); - -CREATE INDEX transactions_block_timestamp_block_number_index ON transactions (block_timestamp DESC, block_number DESC); - -INSERT INTO alembic_version (version_num) VALUES ('5e4608933f64') RETURNING alembic_version.version_num; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql b/hemera/migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql deleted file mode 100644 index f7601c1e6..000000000 --- a/hemera/migrations/manual_versions/20240708_tokens_table_add_column_block_number.sql +++ /dev/null @@ -1,9 +0,0 @@ -BEGIN; - --- Running upgrade 5e4608933f64 -> 8a915490914a - -ALTER TABLE tokens ADD COLUMN block_number BIGINT; - -UPDATE alembic_version SET version_num='8a915490914a' WHERE alembic_version.version_num = '5e4608933f64'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240716_add_api_server_table.sql b/hemera/migrations/manual_versions/20240716_add_api_server_table.sql deleted file mode 100644 index 6e4431ecc..000000000 --- a/hemera/migrations/manual_versions/20240716_add_api_server_table.sql +++ /dev/null @@ -1,115 +0,0 @@ -BEGIN; - --- Running upgrade 8a915490914a -> b15f744e8582 - -CREATE TABLE IF NOT EXISTS daily_addresses_aggregates ( - block_date DATE NOT NULL, - active_address_cnt BIGINT, - receiver_address_cnt BIGINT, - sender_address_cnt BIGINT, - total_address_cnt BIGINT, - new_address_cnt BIGINT, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS daily_blocks_aggregates ( - block_date DATE NOT NULL, - cnt BIGINT, - avg_size NUMERIC, - avg_gas_limit NUMERIC, - avg_gas_used NUMERIC, - total_gas_used BIGINT, - avg_gas_used_percentage NUMERIC, - avg_txn_cnt 
NUMERIC, - total_cnt BIGINT, - block_interval NUMERIC, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS daily_tokens_aggregates ( - block_date DATE NOT NULL, - erc20_active_address_cnt INTEGER, - erc20_total_transfer_cnt BIGINT, - erc721_active_address_cnt INTEGER, - erc721_total_transfer_cnt BIGINT, - erc1155_active_address_cnt INTEGER, - erc1155_total_transfer_cnt BIGINT, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS daily_transactions_aggregates ( - block_date DATE NOT NULL, - cnt BIGINT, - total_cnt BIGINT, - txn_error_cnt BIGINT, - avg_transaction_fee NUMERIC, - avg_gas_price NUMERIC, - max_gas_price NUMERIC, - min_gas_price NUMERIC, - avg_receipt_l1_fee NUMERIC, - max_receipt_l1_fee NUMERIC, - min_receipt_l1_fee NUMERIC, - avg_receipt_l1_gas_used NUMERIC, - max_receipt_l1_gas_used NUMERIC, - min_receipt_l1_gas_used NUMERIC, - avg_receipt_l1_gas_price NUMERIC, - max_receipt_l1_gas_price NUMERIC, - min_receipt_l1_gas_price NUMERIC, - avg_receipt_l1_fee_scalar NUMERIC, - max_receipt_l1_fee_scalar NUMERIC, - min_receipt_l1_fee_scalar NUMERIC, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS scheduled_token_count_metadata ( - id SERIAL NOT NULL, - dag_id VARCHAR, - execution_date TIMESTAMP WITHOUT TIME ZONE, - last_data_timestamp TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT EXISTS scheduled_wallet_count_metadata ( - id SERIAL NOT NULL, - dag_id VARCHAR, - execution_date TIMESTAMP WITHOUT TIME ZONE, - last_data_timestamp TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT EXISTS statistics_wallet_addresses ( - address BYTEA NOT NULL, - txn_in_cnt INTEGER, - txn_out_cnt INTEGER, - txn_in_value NUMERIC(78), - txn_out_value NUMERIC(78), - internal_txn_in_cnt INTEGER, - internal_txn_out_cnt INTEGER, - internal_txn_in_value NUMERIC(78), - internal_txn_out_value NUMERIC(78), - erc20_transfer_in_cnt INTEGER, - erc721_transfer_in_cnt INTEGER, - erc1155_transfer_in_cnt INTEGER, - 
erc20_transfer_out_cnt INTEGER, - erc721_transfer_out_cnt INTEGER, - erc1155_transfer_out_cnt INTEGER, - txn_cnt INTEGER, - internal_txn_cnt INTEGER, - erc20_transfer_cnt INTEGER, - erc721_transfer_cnt INTEGER, - erc1155_transfer_cnt INTEGER, - deposit_cnt INTEGER, - withdraw_cnt INTEGER, - tag VARCHAR, - PRIMARY KEY (address) -); - -CREATE TABLE IF NOT EXISTS wallet_addresses ( - address BYTEA NOT NULL, - ens_name VARCHAR, - PRIMARY KEY (address) -); - -UPDATE alembic_version SET version_num='b15f744e8582' WHERE alembic_version.version_num = '8a915490914a'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240725_update_index_table_optimize.sql b/hemera/migrations/manual_versions/20240725_update_index_table_optimize.sql deleted file mode 100644 index f528c2e94..000000000 --- a/hemera/migrations/manual_versions/20240725_update_index_table_optimize.sql +++ /dev/null @@ -1,196 +0,0 @@ -BEGIN; - --- Running upgrade b15f744e8582 -> 9f2cf385645f - -CREATE TABLE IF NOT EXISTS address_current_token_balances ( - address BYTEA NOT NULL, - token_id NUMERIC(78), - token_type VARCHAR, - token_address BYTEA NOT NULL, - balance NUMERIC(100), - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (address, token_address, token_id) -); - -CREATE INDEX current_token_balances_token_address_balance_of_index ON address_current_token_balances (token_address, balance DESC); - -CREATE INDEX current_token_balances_token_address_id_balance_of_index ON address_current_token_balances (token_address, token_id, balance DESC); - -DROP INDEX erc721_token_holders_token_address_balance_of_index; - -DROP TABLE erc721_token_holders; - -DROP INDEX erc20_token_holders_token_address_balance_of_index; - -DROP TABLE erc20_token_holders; - -DROP TABLE wallet_addresses; - -DROP INDEX 
erc1155_token_holders_token_address_balance_of_index; - -DROP TABLE erc1155_token_holders; - -CREATE INDEX coin_balance_address_number_desc_index ON address_coin_balances (address DESC, block_number DESC); - -CREATE INDEX token_balance_address_id_number_index ON address_token_balances (address, token_address, token_id DESC, block_number DESC); - -ALTER TABLE blocks ADD COLUMN blob_gas_used NUMERIC(100); - -ALTER TABLE blocks ADD COLUMN excess_blob_gas NUMERIC(100); - -ALTER TABLE blocks ADD COLUMN traces_count BIGINT; - -ALTER TABLE blocks ADD COLUMN internal_transactions_count BIGINT; - -CREATE UNIQUE INDEX blocks_hash_unique_when_not_reorg ON blocks (hash) WHERE reorg = false; - -CREATE UNIQUE INDEX blocks_number_unique_when_not_reorg ON blocks (number) WHERE reorg = false; - -DROP INDEX internal_transactions_address_number_transaction_index; - -DROP INDEX internal_transactions_block_timestamp_index; - -CREATE INDEX internal_transactions_block_number_index ON contract_internal_transactions (block_number DESC); - -CREATE INDEX internal_transactions_from_address_number_transaction_index ON contract_internal_transactions (from_address, block_number DESC, transaction_index DESC); - -CREATE INDEX internal_transactions_number_transaction_index ON contract_internal_transactions (block_number DESC, transaction_index DESC); - -CREATE INDEX internal_transactions_to_address_number_transaction_index ON contract_internal_transactions (to_address, block_number DESC, transaction_index DESC); - -DROP INDEX erc1155_detail_desc_address_id_index; - -ALTER TABLE erc1155_token_id_details DROP CONSTRAINT erc1155_token_id_details_pkey; - -ALTER TABLE erc1155_token_id_details RENAME address TO token_address; - -CREATE INDEX erc1155_detail_desc_address_id_index ON erc1155_token_id_details (token_address DESC, token_id); - -ALTER TABLE erc1155_token_id_details ADD CONSTRAINT erc1155_token_id_details_pkey PRIMARY KEY (token_address, token_id); - -ALTER TABLE erc1155_token_transfers ALTER 
COLUMN token_id SET NOT NULL; - -ALTER TABLE erc1155_token_transfers ALTER COLUMN block_hash SET NOT NULL; - -DROP INDEX erc1155_token_transfers_address_block_number_log_index_index; - -DROP INDEX erc1155_token_transfers_block_timestamp_index; - -CREATE INDEX erc1155_token_transfers_from_address_number_log_index_index ON erc1155_token_transfers (from_address, block_number DESC, log_index DESC); - -CREATE INDEX erc1155_token_transfers_number_log_index ON erc1155_token_transfers (block_number DESC, log_index DESC); - -CREATE INDEX erc1155_token_transfers_to_address_number_log_index_index ON erc1155_token_transfers (to_address, block_number DESC, log_index DESC); - -CREATE INDEX erc1155_token_transfers_token_address_from_index ON erc1155_token_transfers (token_address, from_address); - -CREATE INDEX erc1155_token_transfers_token_address_id_index ON erc1155_token_transfers (token_address, token_id); - -CREATE INDEX erc1155_token_transfers_token_address_number_log_index_index ON erc1155_token_transfers (token_address, block_number DESC, log_index DESC); - -CREATE INDEX erc1155_token_transfers_token_address_to_index ON erc1155_token_transfers (token_address, to_address); - -ALTER TABLE erc1155_token_transfers DROP CONSTRAINT erc1155_token_transfers_pkey; - -ALTER TABLE erc1155_token_transfers ADD CONSTRAINT erc1155_token_transfers_pkey PRIMARY KEY (transaction_hash, block_hash, log_index, token_id); - -ALTER TABLE erc20_token_transfers ALTER COLUMN block_hash SET NOT NULL; - -DROP INDEX erc20_token_transfers_address_block_number_log_index_index; - -DROP INDEX erc20_token_transfers_block_timestamp_index; - -CREATE INDEX erc20_token_transfers_from_address_number_log_index_index ON erc20_token_transfers (from_address, block_number DESC, log_index DESC); - -CREATE INDEX erc20_token_transfers_number_log_index ON erc20_token_transfers (block_number DESC, log_index DESC); - -CREATE INDEX erc20_token_transfers_to_address_number_log_index_index ON erc20_token_transfers 
(to_address, block_number DESC, log_index DESC); - -CREATE INDEX erc20_token_transfers_token_address_from_index_index ON erc20_token_transfers (token_address, from_address); - -CREATE INDEX erc20_token_transfers_token_address_number_log_index_index ON erc20_token_transfers (token_address, block_number DESC, log_index DESC); - -CREATE INDEX erc20_token_transfers_token_address_to_index_index ON erc20_token_transfers (token_address, to_address); - -DROP INDEX erc721_change_address_id_number_desc_index; - -ALTER TABLE erc721_token_id_changes DROP CONSTRAINT erc721_token_id_changes_pkey; - -ALTER TABLE erc721_token_id_changes RENAME address TO token_address; - -CREATE INDEX erc721_change_address_id_number_desc_index ON erc721_token_id_changes (token_address, token_id, block_number DESC); - -ALTER TABLE erc721_token_id_changes ADD CONSTRAINT erc721_token_id_changes_pkey PRIMARY KEY (token_address, token_id, block_number); - -DROP INDEX erc721_detail_owner_address_id_index; - -ALTER TABLE erc721_token_id_details DROP CONSTRAINT erc721_token_id_details_pkey; - -ALTER TABLE erc721_token_id_details RENAME address TO token_address; - -CREATE INDEX erc721_detail_owner_address_id_index ON erc721_token_id_details (token_owner DESC, token_address, token_id); - -ALTER TABLE erc721_token_id_details ADD CONSTRAINT erc721_token_id_details_pkey PRIMARY KEY (token_address, token_id); - -ALTER TABLE erc721_token_transfers ALTER COLUMN block_hash SET NOT NULL; - -DROP INDEX erc721_token_transfers_address_block_number_log_index_index; - -CREATE INDEX erc721_token_transfers_from_address_number_log_index_index ON erc721_token_transfers (from_address, block_number DESC, log_index DESC); - -CREATE INDEX erc721_token_transfers_number_log_index ON erc721_token_transfers (block_number DESC, log_index DESC); - -CREATE INDEX erc721_token_transfers_to_address_number_log_index_index ON erc721_token_transfers (to_address, block_number DESC, log_index DESC); - -CREATE INDEX 
erc721_token_transfers_token_address_from_index ON erc721_token_transfers (token_address, from_address); - -CREATE INDEX erc721_token_transfers_token_address_id_index ON erc721_token_transfers (token_address, token_id); - -CREATE INDEX erc721_token_transfers_token_address_number_log_index_index ON erc721_token_transfers (token_address, block_number DESC, log_index DESC); - -CREATE INDEX erc721_token_transfers_token_address_to_index ON erc721_token_transfers (token_address, to_address); - -ALTER TABLE erc721_token_transfers DROP COLUMN token_uri; - -ALTER TABLE logs ALTER COLUMN block_hash SET NOT NULL; - -CREATE INDEX logs_address_topic_0_number_log_index_index ON logs (address, topic0, block_number DESC, log_index DESC); - -CREATE INDEX logs_block_number_log_index_index ON logs (block_number DESC, log_index DESC); - -CREATE INDEX tokens_name_index ON tokens (name); - -CREATE INDEX tokens_type_holders_index ON tokens (token_type, holder_count DESC); - -CREATE INDEX tokens_type_on_chain_market_cap_index ON tokens (token_type, on_chain_market_cap DESC); - -DROP INDEX traces_address_block_timestamp_index; - -CREATE INDEX traces_block_number_index ON traces (block_number DESC); - -CREATE INDEX traces_from_address_block_number_index ON traces (from_address, block_number DESC); - -CREATE INDEX traces_to_address_block_number_index ON traces (to_address, block_number DESC); - -ALTER TABLE transactions ADD COLUMN method_id VARCHAR GENERATED ALWAYS AS (substr(input :: pg_catalog.varchar, 3, 8)) STORED; - -DROP INDEX transactions_address_block_number_transaction_idx; - -DROP INDEX transactions_block_timestamp_block_number_index; - -CREATE INDEX transactions_block_number_transaction_index ON transactions (block_number DESC, transaction_index DESC); - -CREATE INDEX transactions_block_timestamp_index ON transactions (block_timestamp); - -CREATE INDEX transactions_from_address_block_number_transaction_idx ON transactions (from_address ASC, block_number DESC, transaction_index 
DESC); - -CREATE INDEX transactions_to_address_block_number_transaction_idx ON transactions (to_address ASC, block_number DESC, transaction_index DESC); - -UPDATE alembic_version SET version_num='9f2cf385645f' WHERE alembic_version.version_num = 'b15f744e8582'; - - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240726_modify_sync_record_table.sql b/hemera/migrations/manual_versions/20240726_modify_sync_record_table.sql deleted file mode 100644 index 67a20240d..000000000 --- a/hemera/migrations/manual_versions/20240726_modify_sync_record_table.sql +++ /dev/null @@ -1,16 +0,0 @@ -BEGIN; - --- Running upgrade 9f2cf385645f -> 0b922153e040 - -DROP TABLE sync_record; - -CREATE TABLE IF NOT EXISTS sync_record ( - mission_sign VARCHAR NOT NULL, - last_block_number BIGINT, - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (mission_sign) -); - -UPDATE alembic_version SET version_num='0b922153e040' WHERE alembic_version.version_num = '9f2cf385645f'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql b/hemera/migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql deleted file mode 100644 index 122eac04c..000000000 --- a/hemera/migrations/manual_versions/20240731_add_feature_records_and_uniswap_v3_.sql +++ /dev/null @@ -1,47 +0,0 @@ -BEGIN; - --- Running upgrade 0b922153e040 -> 3d5ce8939570 - -CREATE TABLE IF NOT EXISTS all_feature_value_records ( - feature_id NUMERIC(100) NOT NULL, - block_number BIGINT NOT NULL, - address BYTEA NOT NULL, - value JSONB, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (block_number, feature_id, address) -); - -CREATE INDEX all_feature_value_records_feature_block_index ON all_feature_value_records (feature_id, block_number DESC); - -CREATE TABLE IF NOT EXISTS feature_uniswap_v3_pools ( - nft_address BYTEA 
NOT NULL, - pool_address BYTEA NOT NULL, - token0_address BYTEA, - token1_address BYTEA, - fee NUMERIC(100), - tick_spacing NUMERIC(100), - mint_block_number BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (nft_address, pool_address) -); - -CREATE TABLE IF NOT EXISTS feature_uniswap_v3_tokens ( - nft_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - pool_address BYTEA, - tick_lower NUMERIC(100), - tick_upper NUMERIC(100), - fee NUMERIC(100), - mint_block_number BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (nft_address, token_id) -); - -CREATE INDEX feature_uniswap_v3_tokens_nft_index ON feature_uniswap_v3_tokens (nft_address); - -UPDATE alembic_version SET version_num='3d5ce8939570' WHERE alembic_version.version_num = '0b922153e040'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240731_add_user_ops_table.sql b/hemera/migrations/manual_versions/20240731_add_user_ops_table.sql deleted file mode 100644 index 1acc9ccc6..000000000 --- a/hemera/migrations/manual_versions/20240731_add_user_ops_table.sql +++ /dev/null @@ -1,36 +0,0 @@ -BEGIN; - --- Running upgrade 3d5ce8939570 -> 9a1e927f02bb - -CREATE TABLE IF NOT EXISTS user_operations_results ( - user_op_hash BYTEA NOT NULL, - sender VARCHAR(42), - paymaster VARCHAR(42), - nonce NUMERIC, - status BOOLEAN, - actual_gas_cost NUMERIC, - actual_gas_used NUMERIC, - init_code BYTEA, - call_data BYTEA, - call_gas_limit NUMERIC, - verification_gas_limit NUMERIC, - pre_verification_gas NUMERIC, - max_fee_per_gas NUMERIC, - max_priority_fee_per_gas NUMERIC, - paymaster_and_data BYTEA, - signature BYTEA, - transactions_hash BYTEA, - transactions_index INTEGER, - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - bundler VARCHAR(42), - start_log_index INTEGER, - end_log_index INTEGER, - 
PRIMARY KEY (user_op_hash) -); - -CREATE INDEX transactions_hash_index ON user_operations_results (transactions_hash); - -UPDATE alembic_version SET version_num='9a1e927f02bb' WHERE alembic_version.version_num = '3d5ce8939570'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240802_add_exception_recorder_table.sql b/hemera/migrations/manual_versions/20240802_add_exception_recorder_table.sql deleted file mode 100644 index 6344957ef..000000000 --- a/hemera/migrations/manual_versions/20240802_add_exception_recorder_table.sql +++ /dev/null @@ -1,19 +0,0 @@ -BEGIN; - --- Running upgrade 9a1e927f02bb -> 040e5251f45d - -CREATE TABLE IF NOT EXISTS exception_records ( - id BIGSERIAL NOT NULL, - block_number BIGINT, - dataclass VARCHAR, - level VARCHAR, - message_type VARCHAR, - message VARCHAR, - exception_env JSONB, - record_time TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (id) -); - -UPDATE alembic_version SET version_num='040e5251f45d' WHERE alembic_version.version_num = '9a1e927f02bb'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240802_add_l2_chain_table.sql b/hemera/migrations/manual_versions/20240802_add_l2_chain_table.sql deleted file mode 100644 index e096e8f03..000000000 --- a/hemera/migrations/manual_versions/20240802_add_l2_chain_table.sql +++ /dev/null @@ -1,249 +0,0 @@ -BEGIN; - --- Running upgrade 040e5251f45d -> e3a3e2114b9c - -CREATE TABLE IF NOT EXISTS arbitrum_state_batches ( - node_num SERIAL NOT NULL, - create_l1_block_number INTEGER, - create_l1_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_l1_block_hash VARCHAR, - create_l1_transaction_hash VARCHAR, - l1_block_number INTEGER, - l1_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l1_block_hash VARCHAR, - l1_transaction_hash VARCHAR, - parent_node_hash VARCHAR, - node_hash VARCHAR, - block_hash VARCHAR, - send_root VARCHAR, - start_block_number INTEGER, - end_block_number INTEGER, - transaction_count INTEGER, - 
block_count INTEGER, - PRIMARY KEY (node_num) -); - -CREATE TABLE IF NOT EXISTS arbitrum_transaction_batches ( - batch_index SERIAL NOT NULL, - l1_block_number INTEGER, - l1_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l1_block_hash VARCHAR, - l1_transaction_hash VARCHAR, - batch_root VARCHAR, - start_block_number INTEGER, - end_block_number INTEGER, - transaction_count INTEGER, - block_count INTEGER, - PRIMARY KEY (batch_index) -); - -CREATE TABLE IF NOT EXISTS bridge_tokens ( - l1_token_address BYTEA NOT NULL, - l2_token_address BYTEA NOT NULL, - PRIMARY KEY (l1_token_address, l2_token_address) -); - -CREATE TABLE IF NOT EXISTS data_store_tx_mapping ( - data_store_id INTEGER NOT NULL, - index INTEGER NOT NULL, - block_number INTEGER, - transaction_hash VARCHAR, - PRIMARY KEY (data_store_id, index) -); - -CREATE TABLE IF NOT EXISTS data_stores ( - id SERIAL NOT NULL, - store_number INTEGER, - duration_data_store_id INTEGER, - index INTEGER, - data_commitment VARCHAR, - msg_hash VARCHAR, - init_time TIMESTAMP WITHOUT TIME ZONE, - expire_time TIMESTAMP WITHOUT TIME ZONE, - duration INTEGER, - store_period_length INTEGER, - fee INTEGER, - confirmer VARCHAR, - header VARCHAR, - init_tx_hash VARCHAR, - init_gas_used INTEGER, - init_block_number INTEGER, - confirmed BOOLEAN, - signatory_record VARCHAR, - confirm_tx_hash VARCHAR, - confirm_gas_used INTEGER, - batch_index INTEGER, - tx_count INTEGER, - block_count INTEGER, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT EXISTS l1_state_batches ( - batch_index SERIAL NOT NULL, - previous_total_elements INTEGER, - batch_size INTEGER, - l1_block_number INTEGER, - l1_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l1_block_hash VARCHAR, - l1_transaction_hash VARCHAR, - extra_data VARCHAR, - batch_root VARCHAR, - PRIMARY KEY (batch_index) -); - -CREATE TABLE IF NOT EXISTS l1_to_l2_bridge_transactions ( - msg_hash BYTEA NOT NULL, - version INTEGER, - index INTEGER, - l1_block_number INTEGER, - l1_block_timestamp TIMESTAMP 
WITHOUT TIME ZONE, - l1_block_hash BYTEA, - l1_transaction_hash BYTEA, - l1_from_address BYTEA, - l1_to_address BYTEA, - l2_block_number INTEGER, - l2_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l2_block_hash BYTEA, - l2_transaction_hash BYTEA, - l2_from_address BYTEA, - l2_to_address BYTEA, - amount NUMERIC(78), - from_address BYTEA, - to_address BYTEA, - l1_token_address BYTEA, - l2_token_address BYTEA, - extra_info JSON, - _type INTEGER, - sender BYTEA, - target BYTEA, - data BYTEA, - PRIMARY KEY (msg_hash) -); - -CREATE TABLE IF NOT EXISTS l2_to_l1_bridge_transactions ( - msg_hash BYTEA NOT NULL, - version INTEGER, - index INTEGER, - l2_block_number INTEGER, - l2_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l2_block_hash BYTEA, - l2_transaction_hash BYTEA, - l2_from_address BYTEA, - l2_to_address BYTEA, - l1_block_number INTEGER, - l1_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l1_block_hash BYTEA, - l1_transaction_hash BYTEA, - l1_from_address BYTEA, - l1_to_address BYTEA, - amount NUMERIC(78), - from_address BYTEA, - to_address BYTEA, - l1_token_address BYTEA, - l2_token_address BYTEA, - extra_info JSON, - _type INTEGER, - l1_proven_transaction_hash BYTEA, - l1_proven_block_number INTEGER, - l1_proven_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l1_proven_block_hash BYTEA, - l1_proven_from_address BYTEA, - l1_proven_to_address BYTEA, - PRIMARY KEY (msg_hash) -); - -CREATE TABLE IF NOT EXISTS linea_batches ( - number SERIAL NOT NULL, - verify_tx_hash VARCHAR, - verify_block_number INTEGER, - timestamp TIMESTAMP WITHOUT TIME ZONE, - blocks INTEGER[], - transactions VARCHAR[], - last_finalized_block_number INTEGER, - tx_count INTEGER, - block_count INTEGER, - PRIMARY KEY (number) -); - -CREATE TABLE IF NOT EXISTS mantle_batches ( - index SERIAL NOT NULL, - data_store_index INTEGER, - upgrade_data_store_id INTEGER, - data_store_id INTEGER, - status INTEGER, - confirm_at TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (index) -); - -CREATE TABLE IF NOT EXISTS 
op_bedrock_state_batches ( - batch_index SERIAL NOT NULL, - l1_block_number INTEGER, - l1_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - l1_block_hash VARCHAR, - l1_transaction_hash VARCHAR, - start_block_number INTEGER, - end_block_number INTEGER, - batch_root VARCHAR, - transaction_count INTEGER, - block_count INTEGER, - PRIMARY KEY (batch_index) -); - -CREATE TABLE IF NOT EXISTS op_da_transactions ( - receipt_blob_gas_used INTEGER, - receipt_blob_gas_price NUMERIC, - blob_versioned_hashes VARCHAR[], - hash VARCHAR NOT NULL, - nonce INTEGER, - transaction_index INTEGER, - from_address VARCHAR, - to_address VARCHAR, - value NUMERIC, - gas INTEGER, - gas_price INTEGER, - input VARCHAR, - receipt_cumulative_gas_used INTEGER, - receipt_gas_used INTEGER, - receipt_contract_address VARCHAR, - receipt_root VARCHAR, - receipt_status INTEGER, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - block_number INTEGER, - block_hash VARCHAR, - max_fee_per_gas INTEGER, - max_priority_fee_per_gas INTEGER, - transaction_type INTEGER, - receipt_effective_gas_price INTEGER, - PRIMARY KEY (hash) -); - -CREATE TABLE IF NOT EXISTS zkevm_batches ( - batch_index SERIAL NOT NULL, - coinbase VARCHAR, - state_root VARCHAR, - global_exit_root VARCHAR, - mainnet_exit_root VARCHAR, - rollup_exit_root VARCHAR, - local_exit_root VARCHAR, - acc_input_hash VARCHAR, - timestamp TIMESTAMP WITHOUT TIME ZONE, - transactions VARCHAR[], - blocks INTEGER[], - start_block_number INTEGER, - end_block_number INTEGER, - block_count INTEGER, - transaction_count INTEGER, - sequence_batch_tx_hash VARCHAR, - sequence_batch_block_number INTEGER, - sequence_batch_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - verify_batch_tx_hash VARCHAR, - verify_batch_block_number INTEGER, - verify_batch_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - number INTEGER, - send_sequences_tx_hash VARCHAR, - PRIMARY KEY (batch_index) -); - -UPDATE alembic_version SET version_num='e3a3e2114b9c' WHERE alembic_version.version_num = 
'040e5251f45d'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240802_add_uniswap_v2_table.sql b/hemera/migrations/manual_versions/20240802_add_uniswap_v2_table.sql deleted file mode 100644 index 3d6efd16c..000000000 --- a/hemera/migrations/manual_versions/20240802_add_uniswap_v2_table.sql +++ /dev/null @@ -1,27 +0,0 @@ -BEGIN; - --- Running upgrade e3a3e2114b9c -> aa99dd347ef1 - -CREATE TABLE IF NOT EXISTS feature_uniswap_v2_pools ( - factory_address BYTEA NOT NULL, - pool_address BYTEA NOT NULL, - token0_address BYTEA, - token1_address BYTEA, - length NUMERIC(100), - called_block_number BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (factory_address, pool_address) -); - -ALTER TABLE feature_uniswap_v3_pools ADD COLUMN called_block_number BIGINT; - -ALTER TABLE feature_uniswap_v3_pools DROP COLUMN mint_block_number; - -ALTER TABLE feature_uniswap_v3_tokens ADD COLUMN called_block_number BIGINT; - -ALTER TABLE feature_uniswap_v3_tokens DROP COLUMN mint_block_number; - -UPDATE alembic_version SET version_num='aa99dd347ef1' WHERE alembic_version.version_num = 'e3a3e2114b9c'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240805_add_column_to_contracts_table.sql b/hemera/migrations/manual_versions/20240805_add_column_to_contracts_table.sql deleted file mode 100644 index 03b4bb3ad..000000000 --- a/hemera/migrations/manual_versions/20240805_add_column_to_contracts_table.sql +++ /dev/null @@ -1,13 +0,0 @@ -BEGIN; - --- Running upgrade aa99dd347ef1 -> 832fa52da346 - -CREATE EXTENSION IF NOT EXISTS pgcrypto;; - -ALTER TABLE contracts ADD COLUMN deployed_code_hash VARCHAR GENERATED ALWAYS AS (encode(digest('0x'||encode(deployed_code, 'hex'), 'sha256'), 'hex')) STORED; - -ALTER TABLE contracts ADD COLUMN transaction_from_address BYTEA; - -UPDATE alembic_version SET version_num='832fa52da346' WHERE 
alembic_version.version_num = 'aa99dd347ef1'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240806_add_current_traits_activeness.sql b/hemera/migrations/manual_versions/20240806_add_current_traits_activeness.sql deleted file mode 100644 index 34bc9742c..000000000 --- a/hemera/migrations/manual_versions/20240806_add_current_traits_activeness.sql +++ /dev/null @@ -1,16 +0,0 @@ -BEGIN; - --- Running upgrade 832fa52da346 -> b86e241b5e18 - -CREATE TABLE IF NOT EXISTS current_traits_activeness ( - block_number BIGINT NOT NULL, - address BYTEA NOT NULL, - value JSONB, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address) -); - -UPDATE alembic_version SET version_num='b86e241b5e18' WHERE alembic_version.version_num = '832fa52da346'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240808_add_blue_chip_holding.sql b/hemera/migrations/manual_versions/20240808_add_blue_chip_holding.sql deleted file mode 100644 index 0d293c78c..000000000 --- a/hemera/migrations/manual_versions/20240808_add_blue_chip_holding.sql +++ /dev/null @@ -1,18 +0,0 @@ -BEGIN; - --- Running upgrade b86e241b5e18 -> 1b1c6a8b6c7b - -CREATE TABLE IF NOT EXISTS feature_blue_chip_holders ( - wallet_address BYTEA NOT NULL, - hold_detail JSONB, - current_count BIGINT, - called_block_number BIGINT, - called_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (wallet_address) -); - -UPDATE alembic_version SET version_num='1b1c6a8b6c7b' WHERE alembic_version.version_num = 'b86e241b5e18'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql b/hemera/migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql deleted file mode 100644 index 
45fc53b6f..000000000 --- a/hemera/migrations/manual_versions/20240813_add_daily_wallet_address_tables.sql +++ /dev/null @@ -1,99 +0,0 @@ -BEGIN; - --- Running upgrade 1b1c6a8b6c7b -> bf51d23c852f - -CREATE TABLE IF NOT EXISTS daily_contract_interacted_aggregates ( - block_date DATE NOT NULL, - from_address BYTEA NOT NULL, - to_address BYTEA NOT NULL, - contract_interacted_cnt INTEGER, - PRIMARY KEY (block_date, from_address, to_address) -); - -CREATE TABLE IF NOT EXISTS daily_wallet_addresses_aggregates ( - address BYTEA NOT NULL, - block_date DATE NOT NULL, - txn_in_cnt INTEGER, - txn_out_cnt INTEGER, - txn_in_value NUMERIC(78), - txn_out_value NUMERIC(78), - internal_txn_in_cnt INTEGER, - internal_txn_out_cnt INTEGER, - internal_txn_in_value NUMERIC(78), - internal_txn_out_value NUMERIC(78), - erc20_transfer_in_cnt INTEGER, - erc721_transfer_in_cnt INTEGER, - erc1155_transfer_in_cnt INTEGER, - erc20_transfer_out_cnt INTEGER, - erc721_transfer_out_cnt INTEGER, - erc1155_transfer_out_cnt INTEGER, - internal_txn_cnt INTEGER GENERATED ALWAYS AS (internal_txn_in_cnt + internal_txn_out_cnt) STORED, - erc20_transfer_cnt INTEGER GENERATED ALWAYS AS (erc20_transfer_in_cnt + erc20_transfer_out_cnt) STORED, - erc721_transfer_cnt INTEGER GENERATED ALWAYS AS (erc721_transfer_in_cnt + erc721_transfer_out_cnt) STORED, - erc1155_transfer_cnt INTEGER GENERATED ALWAYS AS (erc1155_transfer_in_cnt + erc1155_transfer_out_cnt) STORED, - txn_self_cnt INTEGER, - txn_in_error_cnt INTEGER, - txn_out_error_cnt INTEGER, - txn_self_error_cnt INTEGER, - txn_cnt INTEGER GENERATED ALWAYS AS (((txn_in_cnt + txn_out_cnt) - txn_self_cnt)) STORED, - deposit_cnt INTEGER, - withdraw_cnt INTEGER, - gas_in_used NUMERIC(78), - l2_txn_in_fee NUMERIC(78), - l1_txn_in_fee NUMERIC(78), - txn_in_fee NUMERIC(78), - gas_out_used NUMERIC(78), - l2_txn_out_fee NUMERIC(78), - l1_txn_out_fee NUMERIC(78), - txn_out_fee NUMERIC(78), - contract_deployed_cnt INTEGER, - from_address_unique_interacted_cnt INTEGER, - 
to_address_unique_interacted_cnt INTEGER, - PRIMARY KEY (address, block_date) -); - -CREATE TABLE IF NOT EXISTS period_wallet_addresses_aggregates ( - address BYTEA NOT NULL, - period_date DATE NOT NULL, - txn_in_cnt INTEGER, - txn_out_cnt INTEGER, - txn_in_value NUMERIC(78), - txn_out_value NUMERIC(78), - internal_txn_in_cnt INTEGER, - internal_txn_out_cnt INTEGER, - internal_txn_in_value NUMERIC(78), - internal_txn_out_value NUMERIC(78), - erc20_transfer_in_cnt INTEGER, - erc721_transfer_in_cnt INTEGER, - erc1155_transfer_in_cnt INTEGER, - erc20_transfer_out_cnt INTEGER, - erc721_transfer_out_cnt INTEGER, - erc1155_transfer_out_cnt INTEGER, - internal_txn_cnt INTEGER GENERATED ALWAYS AS (internal_txn_in_cnt + internal_txn_out_cnt) STORED, - erc20_transfer_cnt INTEGER GENERATED ALWAYS AS (erc20_transfer_in_cnt + erc20_transfer_out_cnt) STORED, - erc721_transfer_cnt INTEGER GENERATED ALWAYS AS (erc721_transfer_in_cnt + erc721_transfer_out_cnt) STORED, - erc1155_transfer_cnt INTEGER GENERATED ALWAYS AS (erc1155_transfer_in_cnt + erc1155_transfer_out_cnt) STORED, - txn_self_cnt INTEGER NOT NULL, - txn_in_error_cnt INTEGER NOT NULL, - txn_out_error_cnt INTEGER NOT NULL, - txn_self_error_cnt INTEGER NOT NULL, - txn_cnt INTEGER GENERATED ALWAYS AS (((txn_in_cnt + txn_out_cnt) - txn_self_cnt)) STORED, - deposit_cnt INTEGER, - withdraw_cnt INTEGER, - gas_in_used NUMERIC(78), - l2_txn_in_fee NUMERIC(78), - l1_txn_in_fee NUMERIC(78), - txn_in_fee NUMERIC(78), - gas_out_used NUMERIC(78), - l2_txn_out_fee NUMERIC(78), - l1_txn_out_fee NUMERIC(78), - txn_out_fee NUMERIC(78), - contract_deployed_cnt INTEGER, - from_address_unique_interacted_cnt INTEGER, - to_address_unique_interacted_cnt INTEGER, - PRIMARY KEY (address, period_date) -); - -UPDATE alembic_version SET version_num='bf51d23c852f' WHERE alembic_version.version_num = '1b1c6a8b6c7b'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240827_add_token_price_table.sql 
b/hemera/migrations/manual_versions/20240827_add_token_price_table.sql deleted file mode 100644 index c079eac2b..000000000 --- a/hemera/migrations/manual_versions/20240827_add_token_price_table.sql +++ /dev/null @@ -1,20 +0,0 @@ -BEGIN; - --- Running upgrade bf51d23c852f -> 2359a28d63cb -CREATE TABLE IF NOT EXISTS IF NOT EXISTS token_hourly_prices( - symbol VARCHAR NOT NULL, - timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - price NUMERIC, - PRIMARY KEY (symbol, timestamp) -); - -CREATE TABLE IF NOT EXISTS IF NOT EXISTS token_prices( - symbol VARCHAR NOT NULL, - timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - price NUMERIC, - PRIMARY KEY (symbol, timestamp) -); - -UPDATE alembic_version SET version_num='2359a28d63cb' WHERE alembic_version.version_num = 'bf51d23c852f'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240830_add_address_token_deposit_table.sql b/hemera/migrations/manual_versions/20240830_add_address_token_deposit_table.sql deleted file mode 100644 index b84124dba..000000000 --- a/hemera/migrations/manual_versions/20240830_add_address_token_deposit_table.sql +++ /dev/null @@ -1,45 +0,0 @@ -BEGIN; - --- Running upgrade 2359a28d63cb -> 6c2eecd6316b - -CREATE TABLE IF NOT EXISTS af_token_deposits__transactions ( - transaction_hash BYTEA NOT NULL, - wallet_address BYTEA, - chain_id BIGINT, - contract_address BYTEA, - token_address BYTEA, - value NUMERIC(100), - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (transaction_hash) -); - -CREATE INDEX af_deposits_transactions_block_number_index ON af_token_deposits__transactions (block_number DESC); - -CREATE INDEX af_deposits_transactions_chain_id_index ON af_token_deposits__transactions (chain_id); - -CREATE INDEX af_deposits_transactions_contract_address_index ON 
af_token_deposits__transactions (contract_address); - -CREATE INDEX af_deposits_transactions_token_address_index ON af_token_deposits__transactions (token_address); - -CREATE INDEX af_deposits_transactions_wallet_address_index ON af_token_deposits__transactions (wallet_address); - -CREATE TABLE IF NOT EXISTS af_token_deposits_current ( - wallet_address BYTEA NOT NULL, - chain_id BIGINT NOT NULL, - contract_address BYTEA NOT NULL, - token_address BYTEA NOT NULL, - value NUMERIC(100), - block_number BIGINT, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (wallet_address, token_address, contract_address, chain_id) -); - -UPDATE alembic_version SET version_num='6c2eecd6316b' WHERE alembic_version.version_num = '2359a28d63cb'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240831_add_ens.sql b/hemera/migrations/manual_versions/20240831_add_ens.sql deleted file mode 100644 index c482df9b0..000000000 --- a/hemera/migrations/manual_versions/20240831_add_ens.sql +++ /dev/null @@ -1,72 +0,0 @@ -BEGIN; - --- Running upgrade 6c2eecd6316b -> 43d14640a8ac - -CREATE TABLE IF NOT EXISTS af_ens_address_current ( - address BYTEA NOT NULL, - name VARCHAR, - reverse_node BYTEA, - block_number BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address) -); - -CREATE TABLE IF NOT EXISTS af_ens_event ( - transaction_hash BYTEA NOT NULL, - transaction_index INTEGER NOT NULL, - log_index INTEGER NOT NULL, - block_number BIGINT, - block_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - method VARCHAR, - event_name VARCHAR, - topic0 VARCHAR, - from_address BYTEA, - to_address BYTEA, - base_node BYTEA, - node BYTEA, - label BYTEA, - name VARCHAR, - expires TIMESTAMP WITHOUT TIME ZONE, - owner BYTEA, - resolver BYTEA, - registrant 
BYTEA, - address BYTEA, - reverse_base_node BYTEA, - reverse_node BYTEA, - reverse_label BYTEA, - reverse_name VARCHAR, - token_id NUMERIC(100), - w_token_id NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - CONSTRAINT ens_tnx_log_index PRIMARY KEY (transaction_hash, log_index) -); - -CREATE INDEX ens_event_address ON af_ens_event (from_address); - -CREATE INDEX ens_idx_block_number_log_index ON af_ens_event (block_number, log_index DESC); - -CREATE TABLE IF NOT EXISTS af_ens_node_current ( - node BYTEA NOT NULL, - token_id NUMERIC(100), - w_token_id NUMERIC(100), - first_owned_by BYTEA, - name VARCHAR, - registration TIMESTAMP WITHOUT TIME ZONE, - expires TIMESTAMP WITHOUT TIME ZONE, - address BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (node) -); - -CREATE INDEX ens_idx_address ON af_ens_node_current (address); - -CREATE INDEX ens_idx_name_md5 ON af_ens_node_current (md5(name)); - -UPDATE alembic_version SET version_num='43d14640a8ac' WHERE alembic_version.version_num = '6c2eecd6316b'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql b/hemera/migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql deleted file mode 100644 index a53ae901a..000000000 --- a/hemera/migrations/manual_versions/20240906_add_uniswap_v3_enhance_table.sql +++ /dev/null @@ -1,186 +0,0 @@ -BEGIN; - --- Running upgrade 43d14640a8ac -> f4efa18760cc - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_pool_prices_current ( - pool_address BYTEA NOT NULL, - block_number BIGINT, - block_timestamp BIGINT, - factory_address BYTEA, - sqrt_price_x96 NUMERIC(100), - tick NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY 
(pool_address) -); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_pool_prices_hist ( - pool_address BYTEA NOT NULL, - block_number BIGINT NOT NULL, - block_timestamp BIGINT NOT NULL, - sqrt_price_x96 NUMERIC(100), - tick NUMERIC(100), - factory_address BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (pool_address, block_timestamp, block_number) -); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_pool_swap_hist ( - pool_address BYTEA NOT NULL, - transaction_hash BYTEA NOT NULL, - log_index INTEGER NOT NULL, - block_number BIGINT, - block_timestamp BIGINT, - position_token_address BYTEA, - transaction_from_address BYTEA, - sender BYTEA, - recipient BYTEA, - liquidity NUMERIC(100), - tick NUMERIC(100), - sqrt_price_x96 NUMERIC(100), - amount0 NUMERIC(100), - amount1 NUMERIC(100), - token0_address BYTEA, - token1_address BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (pool_address, transaction_hash, log_index) -); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_pools ( - position_token_address BYTEA NOT NULL, - pool_address BYTEA NOT NULL, - factory_address BYTEA, - token0_address BYTEA, - token1_address BYTEA, - fee NUMERIC(100), - tick_spacing NUMERIC(100), - block_number BIGINT, - block_timestamp BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (position_token_address, pool_address) -); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_token_collect_fee_hist ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_number BIGINT NOT NULL, - block_timestamp BIGINT NOT NULL, - log_index INTEGER NOT NULL, - transaction_hash BYTEA, - owner BYTEA, - recipient BYTEA, - amount0 NUMERIC(100), - amount1 NUMERIC(100), - pool_address BYTEA, - 
token0_address BYTEA, - token1_address BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (position_token_address, token_id, block_timestamp, block_number, log_index) -); - -CREATE INDEX af_uniswap_v3_token_collect_fee_hist_owner_index ON af_uniswap_v3_token_collect_fee_hist (owner); - -CREATE INDEX af_uniswap_v3_token_collect_fee_hist_pool_index ON af_uniswap_v3_token_collect_fee_hist (pool_address); - -CREATE INDEX af_uniswap_v3_token_collect_fee_hist_token0_index ON af_uniswap_v3_token_collect_fee_hist (token0_address); - -CREATE INDEX af_uniswap_v3_token_collect_fee_hist_token1_index ON af_uniswap_v3_token_collect_fee_hist (token1_address); - -CREATE INDEX af_uniswap_v3_token_collect_fee_hist_token_id_index ON af_uniswap_v3_token_collect_fee_hist (token_id); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_token_data_current ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_number BIGINT, - block_timestamp BIGINT, - wallet_address BYTEA, - pool_address BYTEA, - liquidity NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (position_token_address, token_id) -); - -CREATE INDEX af_uniswap_v3_token_data_current_wallet_desc_index ON af_uniswap_v3_token_data_current (wallet_address DESC); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_token_data_hist ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_number BIGINT NOT NULL, - block_timestamp BIGINT NOT NULL, - wallet_address BYTEA, - pool_address BYTEA, - liquidity NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (position_token_address, token_id, block_timestamp, block_number) -); - -CREATE INDEX 
af_uniswap_v3_token_data_hist_token_block_desc_index ON af_uniswap_v3_token_data_hist (position_token_address DESC, block_timestamp DESC); - -CREATE INDEX af_uniswap_v3_token_data_hist_wallet_token_block_desc_index ON af_uniswap_v3_token_data_hist (wallet_address DESC, position_token_address DESC, block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_token_liquidity_hist ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_number BIGINT NOT NULL, - block_timestamp BIGINT NOT NULL, - log_index INTEGER NOT NULL, - transaction_hash BYTEA, - owner BYTEA, - liquidity NUMERIC(100), - amount0 NUMERIC(100), - amount1 NUMERIC(100), - pool_address BYTEA, - token0_address BYTEA, - token1_address BYTEA, - action_type VARCHAR, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (position_token_address, token_id, block_timestamp, block_number, log_index) -); - -CREATE INDEX af_uniswap_v3_token_liquidity_hist_owner_index ON af_uniswap_v3_token_liquidity_hist (owner); - -CREATE INDEX af_uniswap_v3_token_liquidity_hist_pool_index ON af_uniswap_v3_token_liquidity_hist (pool_address); - -CREATE INDEX af_uniswap_v3_token_liquidity_hist_token0_index ON af_uniswap_v3_token_liquidity_hist (token0_address); - -CREATE INDEX af_uniswap_v3_token_liquidity_hist_token1_index ON af_uniswap_v3_token_liquidity_hist (token1_address); - -CREATE INDEX af_uniswap_v3_token_liquidity_hist_token_id_index ON af_uniswap_v3_token_liquidity_hist (token_id); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_tokens ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - pool_address BYTEA, - tick_lower NUMERIC(100), - tick_upper NUMERIC(100), - fee NUMERIC(100), - block_number BIGINT, - block_timestamp BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY 
(position_token_address, token_id) -); - -CREATE INDEX af_uniswap_v3_tokens_nft_index ON af_uniswap_v3_tokens (position_token_address); - -DROP INDEX feature_uniswap_v3_tokens_nft_index; - -DROP TABLE feature_uniswap_v3_tokens; - -DROP TABLE feature_uniswap_v3_pools; - -UPDATE alembic_version SET version_num='f4efa18760cc' WHERE alembic_version.version_num = '43d14640a8ac'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240910_add_address_index.sql b/hemera/migrations/manual_versions/20240910_add_address_index.sql deleted file mode 100644 index 5ec637bc6..000000000 --- a/hemera/migrations/manual_versions/20240910_add_address_index.sql +++ /dev/null @@ -1,96 +0,0 @@ -BEGIN; - --- Running upgrade f4efa18760cc -> e8f78802f27a - -CREATE TABLE IF NOT EXISTS address_nft_1155_holders ( - address BYTEA NOT NULL, - token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - balance_of NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, token_address, token_id) -); - -CREATE INDEX address_nft_1155_holders_token_address_balance_of_idx ON address_nft_1155_holders (token_address, token_id, balance_of DESC); - -CREATE TABLE IF NOT EXISTS address_nft_transfers ( - address BYTEA NOT NULL, - block_number INTEGER NOT NULL, - log_index INTEGER NOT NULL, - transaction_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - block_hash BYTEA NOT NULL, - token_address BYTEA, - related_address BYTEA, - transfer_type SMALLINT, - token_id NUMERIC(100) NOT NULL, - value NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, block_number, log_index, block_timestamp, block_hash, token_id) -); - -CREATE TABLE IF NOT EXISTS address_token_holders ( - address BYTEA NOT NULL, - token_address BYTEA NOT NULL, - balance_of NUMERIC(100), - 
create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, token_address) -); - -CREATE INDEX address_token_holders_token_address_balance_of_idx ON address_token_holders (token_address, balance_of DESC); - -CREATE TABLE IF NOT EXISTS address_token_transfers ( - address BYTEA NOT NULL, - block_number INTEGER NOT NULL, - log_index INTEGER NOT NULL, - transaction_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - block_hash BYTEA NOT NULL, - token_address BYTEA, - related_address BYTEA, - transfer_type SMALLINT, - value NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, block_number, log_index, block_timestamp, block_hash) -); - -CREATE TABLE IF NOT EXISTS address_transactions ( - address BYTEA NOT NULL, - block_number INTEGER NOT NULL, - transaction_index INTEGER NOT NULL, - transaction_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - block_hash BYTEA, - txn_type SMALLINT, - related_address BYTEA, - value NUMERIC(100), - transaction_fee NUMERIC(100), - receipt_status INTEGER, - method TEXT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, block_number, transaction_index, block_timestamp) -); - -CREATE INDEX address_transactions_address_block_timestamp_block_number_t_idx ON address_transactions (address, block_timestamp DESC, block_number DESC, transaction_index DESC); - -CREATE INDEX address_transactions_address_txn_type_block_timestamp_block_idx ON address_transactions (address, txn_type, block_timestamp DESC, block_number DESC, transaction_index DESC); - -CREATE TABLE IF NOT EXISTS token_address_nft_inventories ( - token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - wallet_address BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - 
update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (token_address, token_id) -); - -CREATE INDEX token_address_nft_inventories_wallet_address_token_address__idx ON token_address_nft_inventories (wallet_address, token_address, token_id); - -UPDATE alembic_version SET version_num='e8f78802f27a' WHERE alembic_version.version_num = 'f4efa18760cc'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240911_add_opensea.sql b/hemera/migrations/manual_versions/20240911_add_opensea.sql deleted file mode 100644 index 3166004d1..000000000 --- a/hemera/migrations/manual_versions/20240911_add_opensea.sql +++ /dev/null @@ -1,115 +0,0 @@ -BEGIN; - --- Running upgrade e8f78802f27a -> 3dd9b90d2e31 - -CREATE TABLE IF NOT EXISTS af_opensea__transactions ( - address BYTEA NOT NULL, - is_offer BOOLEAN NOT NULL, - related_address BYTEA, - transaction_type SMALLINT, - order_hash BYTEA, - zone BYTEA, - offer JSONB, - consideration JSONB, - fee JSONB, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - transaction_hash BYTEA, - block_number BIGINT NOT NULL, - log_index BIGINT NOT NULL, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - block_hash BYTEA NOT NULL, - reorg BOOLEAN DEFAULT false, - protocol_version VARCHAR DEFAULT '1.6', - PRIMARY KEY (address, is_offer, block_number, log_index, block_hash) -); - -CREATE INDEX af_opensea__transactions_address_block_number_log_index_blo_idx ON af_opensea__transactions (address, block_number DESC, log_index DESC, block_timestamp DESC); - -CREATE INDEX af_opensea__transactions_address_block_timestamp_idx ON af_opensea__transactions (address, block_timestamp DESC); - -CREATE INDEX af_opensea__transactions_block_timestamp_idx ON af_opensea__transactions (block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS af_opensea_daily_transactions ( - address BYTEA NOT NULL, - block_date DATE NOT NULL, - buy_txn_count INTEGER, - sell_txn_count 
INTEGER, - swap_txn_count INTEGER, - buy_opensea_order_count INTEGER, - sell_opensea_order_count INTEGER, - swap_opensea_order_count INTEGER, - buy_nft_stats JSONB, - sell_nft_stats JSONB, - buy_volume_crypto JSONB, - sell_volume_crypto JSONB, - buy_volume_usd NUMERIC, - sell_volume_usd NUMERIC, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, block_date) -); - -CREATE TABLE IF NOT EXISTS af_opensea_na_crypto_token_mapping ( - id SERIAL NOT NULL, - address_var VARCHAR(42), - price_symbol VARCHAR, - decimals INTEGER DEFAULT 18 NOT NULL, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT EXISTS af_opensea_na_orders ( - order_hash BYTEA, - zone BYTEA, - offerer BYTEA, - recipient BYTEA, - offer JSON, - consideration JSON, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - transaction_hash BYTEA, - block_number BIGINT NOT NULL, - log_index INTEGER NOT NULL, - block_timestamp TIMESTAMP WITHOUT TIME ZONE, - block_hash BYTEA NOT NULL, - reorg BOOLEAN DEFAULT false, - protocol_version VARCHAR DEFAULT '1.6', - PRIMARY KEY (block_number, log_index, block_hash) -); - -CREATE INDEX idx_order_hash ON af_opensea_na_orders (order_hash); - -CREATE TABLE IF NOT EXISTS af_opensea_na_scheduled_metadata ( - id SERIAL NOT NULL, - dag_id VARCHAR, - execution_date TIMESTAMP WITHOUT TIME ZONE, - last_data_timestamp TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT EXISTS af_opensea_profile ( - address BYTEA NOT NULL, - buy_txn_count INTEGER DEFAULT 0, - sell_txn_count INTEGER DEFAULT 0, - swap_txn_count INTEGER DEFAULT 0, - buy_opensea_order_count INTEGER DEFAULT 0, - sell_opensea_order_count INTEGER DEFAULT 0, - swap_opensea_order_count INTEGER DEFAULT 0, - buy_nft_stats JSONB, - sell_nft_stats JSONB, - buy_volume_usd NUMERIC, - sell_volume_usd NUMERIC, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - 
update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - first_transaction_hash BYTEA, - first_block_timestamp TIMESTAMP WITHOUT TIME ZONE, - txn_count INTEGER GENERATED ALWAYS AS ((buy_txn_count + sell_txn_count) + swap_txn_count) STORED, - opensea_order_count INTEGER GENERATED ALWAYS AS ((buy_opensea_order_count + sell_opensea_order_count) + swap_opensea_order_count) STORED, - volume_usd NUMERIC DEFAULT 0, - PRIMARY KEY (address) -); - -UPDATE alembic_version SET version_num='3dd9b90d2e31' WHERE alembic_version.version_num = 'e8f78802f27a'; - - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql b/hemera/migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql deleted file mode 100644 index b1f51cc5d..000000000 --- a/hemera/migrations/manual_versions/20240912_add_merchant_and_uniswap_daily_table.sql +++ /dev/null @@ -1,165 +0,0 @@ -BEGIN; - --- Running upgrade 3dd9b90d2e31 -> c609922eae7a - -CREATE TABLE IF NOT EXISTS af_merchant_moe_pools ( - position_token_address BYTEA NOT NULL, - block_timestamp BIGINT, - block_number BIGINT, - token0_address BYTEA, - token1_address BYTEA, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (position_token_address) -); - -CREATE TABLE IF NOT EXISTS af_merchant_moe_token_bin_current ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_timestamp BIGINT, - block_number BIGINT, - reserve0_bin NUMERIC(100), - reserve1_bin NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (position_token_address, token_id) -); - -CREATE INDEX af_merchant_moe_token_bin_current_token_id_index ON af_merchant_moe_token_bin_current (position_token_address DESC, token_id ASC); - -CREATE TABLE IF NOT EXISTS 
af_merchant_moe_token_bin_hist ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_timestamp BIGINT NOT NULL, - block_number BIGINT NOT NULL, - reserve0_bin NUMERIC(100), - reserve1_bin NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (position_token_address, token_id, block_timestamp, block_number) -); - -CREATE INDEX af_merchant_moe_token_bin_hist_token_block_desc_index ON af_merchant_moe_token_bin_hist (position_token_address DESC, block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS af_merchant_moe_token_supply_current ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_timestamp BIGINT, - block_number BIGINT, - total_supply NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (position_token_address, token_id) -); - -CREATE TABLE IF NOT EXISTS af_merchant_moe_token_supply_hist ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - block_timestamp BIGINT NOT NULL, - block_number BIGINT NOT NULL, - total_supply NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN DEFAULT false, - PRIMARY KEY (position_token_address, token_id, block_timestamp, block_number) -); - -CREATE INDEX af_merchant_moe_token_supply_hist_token_block_desc_index ON af_merchant_moe_token_supply_hist (position_token_address DESC, block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS af_holding_balance_merchantmoe_period ( - period_date DATE NOT NULL, - protocol_id VARCHAR NOT NULL, - position_token_address BYTEA NOT NULL, - token_id NUMERIC NOT NULL, - wallet_address BYTEA NOT NULL, - token0_address BYTEA NOT NULL, - token0_symbol VARCHAR NOT NULL, - token0_balance NUMERIC(100, 18), - token1_address BYTEA NOT 
NULL, - token1_symbol VARCHAR NOT NULL, - token1_balance NUMERIC(100, 18), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (period_date, protocol_id, position_token_address, token_id, wallet_address) -); - -CREATE INDEX af_holding_balance_merchantmoe_period_period_date ON af_holding_balance_merchantmoe_period (period_date); - -CREATE TABLE IF NOT EXISTS af_holding_balance_uniswap_v3_period ( - period_date DATE NOT NULL, - protocol_id VARCHAR NOT NULL, - position_token_address BYTEA NOT NULL, - token_id INTEGER NOT NULL, - wallet_address BYTEA NOT NULL, - token0_address BYTEA NOT NULL, - token0_symbol VARCHAR NOT NULL, - token0_balance NUMERIC(100, 18), - token1_address BYTEA NOT NULL, - token1_symbol VARCHAR NOT NULL, - token1_balance NUMERIC(100, 18), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (period_date, protocol_id, position_token_address, token_id) -); - -CREATE INDEX af_holding_balance_uniswap_v3_period_period_date ON af_holding_balance_uniswap_v3_period (period_date); - -CREATE TABLE IF NOT EXISTS af_merchant_moe_token_bin_hist_period ( - period_date DATE NOT NULL, - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - reserve0_bin NUMERIC(100), - reserve1_bin NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (period_date, position_token_address, token_id) -); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_pool_prices_daily ( - block_date DATE NOT NULL, - pool_address BYTEA NOT NULL, - sqrt_price_x96 NUMERIC(78), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (block_date, pool_address) -); - -CREATE INDEX af_uniswap_v3_pool_prices_daily_block_date_index ON af_uniswap_v3_pool_prices_daily (block_date); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_pool_prices_period ( - period_date DATE NOT NULL, - pool_address BYTEA NOT NULL, - sqrt_price_x96 NUMERIC(78), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY 
(period_date, pool_address) -); - -CREATE INDEX af_uniswap_v3_pool_prices_period_period_date_index ON af_uniswap_v3_pool_prices_period (period_date); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_token_data_daily ( - block_date DATE NOT NULL, - position_token_address BYTEA NOT NULL, - token_id INTEGER NOT NULL, - wallet_address BYTEA NOT NULL, - pool_address BYTEA NOT NULL, - liquidity NUMERIC(78), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (block_date, position_token_address, token_id) -); - -CREATE INDEX af_uniswap_v3_token_data_daily_index ON af_uniswap_v3_token_data_daily (block_date); - -CREATE TABLE IF NOT EXISTS af_uniswap_v3_token_data_period ( - period_date DATE NOT NULL, - position_token_address BYTEA NOT NULL, - token_id INTEGER NOT NULL, - wallet_address BYTEA NOT NULL, - pool_address BYTEA NOT NULL, - liquidity NUMERIC(78), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (period_date, position_token_address, token_id) -); - -CREATE INDEX af_uniswap_v3_token_data_period_date_index ON af_uniswap_v3_token_data_period (period_date); - -UPDATE alembic_version SET version_num='c609922eae7a' WHERE alembic_version.version_num = '3dd9b90d2e31'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20240927_add_merchant_moe_table.sql b/hemera/migrations/manual_versions/20240927_add_merchant_moe_table.sql deleted file mode 100644 index df3b46c7a..000000000 --- a/hemera/migrations/manual_versions/20240927_add_merchant_moe_table.sql +++ /dev/null @@ -1,66 +0,0 @@ -BEGIN; - --- Running upgrade c609922eae7a -> 67015d9fa59b - -CREATE TABLE IF NOT EXISTS af_merchant_moe_pool_data_current ( - pool_address BYTEA NOT NULL, - block_timestamp BIGINT, - block_number BIGINT, - active_id BIGINT, - bin_step BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN, - PRIMARY KEY (pool_address) -); - -CREATE TABLE IF NOT 
EXISTS af_merchant_moe_pool_data_hist ( - pool_address BYTEA NOT NULL, - block_timestamp BIGINT NOT NULL, - block_number BIGINT NOT NULL, - active_id BIGINT, - bin_step BIGINT, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN, - PRIMARY KEY (pool_address, block_timestamp, block_number) -); - -CREATE TABLE IF NOT EXISTS af_staked_fbtc_current ( - vault_address BYTEA NOT NULL, - wallet_address BYTEA NOT NULL, - block_number BIGINT, - block_timestamp BIGINT, - amount NUMERIC(100), - changed_amount NUMERIC(100), - protocol_id VARCHAR, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (vault_address, wallet_address) -); - -CREATE INDEX af_staked_fbtc_current_protocol_block_desc_index ON af_staked_fbtc_current (protocol_id DESC); - -CREATE INDEX af_staked_fbtc_current_wallet_block_desc_index ON af_staked_fbtc_current (wallet_address DESC); - -CREATE TABLE IF NOT EXISTS af_staked_fbtc_detail_hist ( - vault_address BYTEA NOT NULL, - wallet_address BYTEA NOT NULL, - block_number BIGINT NOT NULL, - block_timestamp BIGINT NOT NULL, - amount NUMERIC(100), - changed_amount NUMERIC(100), - protocol_id VARCHAR, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN, - PRIMARY KEY (vault_address, wallet_address, block_timestamp, block_number) -); - -CREATE INDEX af_staked_fbtc_detail_hist_protocol_block_desc_index ON af_staked_fbtc_detail_hist (protocol_id DESC, block_timestamp DESC); - -CREATE INDEX af_staked_fbtc_detail_hist_wallet_block_desc_index ON af_staked_fbtc_detail_hist (wallet_address DESC, block_timestamp DESC); - -UPDATE alembic_version SET version_num='67015d9fa59b' WHERE alembic_version.version_num = 'c609922eae7a'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20241017_earlier_table_change.sql 
b/hemera/migrations/manual_versions/20241017_earlier_table_change.sql deleted file mode 100644 index 187aca533..000000000 --- a/hemera/migrations/manual_versions/20241017_earlier_table_change.sql +++ /dev/null @@ -1,9 +0,0 @@ -BEGIN; - --- Running upgrade 67015d9fa59b -> bc23aa19668e - -ALTER TABLE af_ens_node_current ADD COLUMN block_number BIGINT; - -UPDATE alembic_version SET version_num='bc23aa19668e' WHERE alembic_version.version_num = '67015d9fa59b'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20241105_add_address_index_and_stats.sql b/hemera/migrations/manual_versions/20241105_add_address_index_and_stats.sql deleted file mode 100644 index 5ab53222f..000000000 --- a/hemera/migrations/manual_versions/20241105_add_address_index_and_stats.sql +++ /dev/null @@ -1,292 +0,0 @@ -BEGIN; - --- Running upgrade bc23aa19668e -> 3bd2e3099bae - -CREATE TABLE IF NOT EXISTS address_contract_operations ( - address BYTEA NOT NULL, - trace_from_address BYTEA, - contract_address BYTEA, - trace_id TEXT NOT NULL, - block_number INTEGER NOT NULL, - transaction_index INTEGER NOT NULL, - transaction_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - block_hash BYTEA, - error TEXT, - status INTEGER, - creation_code BYTEA, - deployed_code BYTEA, - gas NUMERIC(100), - gas_used NUMERIC(100), - trace_type TEXT, - call_type TEXT, - transaction_receipt_status INTEGER, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, trace_id, block_number, transaction_index, block_timestamp) -); - -CREATE INDEX IF NOT EXISTS address_contract_operations_address_block_tn_t_idx ON address_contract_operations (address, block_timestamp DESC, block_number DESC, transaction_index DESC); - -CREATE TABLE IF NOT EXISTS address_internal_transactions ( - address BYTEA NOT NULL, - trace_id TEXT NOT NULL, - block_number INTEGER NOT NULL, - transaction_index INTEGER NOT 
NULL, - transaction_hash BYTEA, - block_timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL, - block_hash BYTEA, - error TEXT, - status INTEGER, - input_method TEXT, - value NUMERIC(100), - gas NUMERIC(100), - gas_used NUMERIC(100), - trace_type TEXT, - call_type TEXT, - txn_type SMALLINT, - related_address BYTEA, - transaction_receipt_status INTEGER, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (address, trace_id, block_number, transaction_index, block_timestamp) -); - -CREATE INDEX address_internal_transactions_address_nt_t_idx ON address_internal_transactions (address, block_timestamp DESC, block_number DESC, transaction_index DESC); - -CREATE TABLE IF NOT EXISTS af_erc1155_token_holdings_current ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - wallet_address BYTEA NOT NULL, - block_number BIGINT, - block_timestamp BIGINT, - balance NUMERIC(100), - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - PRIMARY KEY (position_token_address, token_id, wallet_address) -); - -CREATE INDEX af_erc1155_token_holdings_current_token_block_desc_index ON af_erc1155_token_holdings_current (position_token_address DESC, block_timestamp DESC); - -CREATE INDEX af_erc1155_token_holdings_current_wallet_block_desc_index ON af_erc1155_token_holdings_current (wallet_address DESC, block_timestamp DESC); - -CREATE TABLE IF NOT EXISTS af_erc1155_token_holdings_hist ( - position_token_address BYTEA NOT NULL, - token_id NUMERIC(100) NOT NULL, - wallet_address BYTEA NOT NULL, - balance NUMERIC(100), - block_number BIGINT NOT NULL, - block_timestamp BIGINT NOT NULL, - create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now(), - reorg BOOLEAN, - PRIMARY KEY (position_token_address, token_id, wallet_address, block_timestamp, block_number) -); - -CREATE INDEX 
feature_erc1155_token_holding_token_block_desc_index ON af_erc1155_token_holdings_hist (position_token_address DESC, block_timestamp DESC); - -CREATE INDEX feature_erc1155_token_holding_token_wallet_block_desc_index ON af_erc1155_token_holdings_hist (position_token_address DESC, wallet_address DESC, block_number DESC); - -CREATE TABLE IF NOT EXISTS af_index_daily_stats ( - address BYTEA NOT NULL, - block_date DATE NOT NULL, - transaction_in_count INTEGER, - transaction_out_count INTEGER, - transaction_self_count INTEGER, - transaction_in_value BIGINT, - transaction_out_value BIGINT, - transaction_self_value BIGINT, - transaction_in_fee NUMERIC, - transaction_out_fee NUMERIC, - transaction_self_fee NUMERIC, - internal_transaction_in_count INTEGER, - internal_transaction_out_count INTEGER, - internal_transaction_self_count INTEGER, - internal_transaction_in_value BIGINT, - internal_transaction_out_value BIGINT, - internal_transaction_self_value BIGINT, - erc20_transfer_in_count INTEGER, - erc20_transfer_out_count INTEGER, - erc20_transfer_self_count INTEGER, - nft_transfer_in_count INTEGER, - nft_transfer_out_count INTEGER, - nft_transfer_self_count INTEGER, - nft_721_transfer_in_count INTEGER, - nft_721_transfer_out_count INTEGER, - nft_721_transfer_self_count INTEGER, - nft_1155_transfer_in_count INTEGER, - nft_1155_transfer_out_count INTEGER, - nft_1155_transfer_self_count INTEGER, - contract_creation_count INTEGER, - contract_destruction_count INTEGER, - contract_operation_count INTEGER, - transaction_count INTEGER, - internal_transaction_count INTEGER, - erc20_transfer_count INTEGER, - nft_transfer_count INTEGER, - nft_721_transfer_count INTEGER, - nft_1155_transfer_count INTEGER, - PRIMARY KEY (address, block_date) -); - -CREATE TABLE IF NOT EXISTS af_index_na_scheduled_metadata ( - id SERIAL NOT NULL, - dag_id VARCHAR, - execution_date TIMESTAMP WITHOUT TIME ZONE, - last_data_timestamp TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT 
EXISTS af_index_stats ( - address BYTEA NOT NULL, - transaction_in_count INTEGER, - transaction_out_count INTEGER, - transaction_self_count INTEGER, - transaction_in_value NUMERIC, - transaction_out_value NUMERIC, - transaction_self_value NUMERIC, - transaction_in_fee NUMERIC, - transaction_out_fee NUMERIC, - transaction_self_fee NUMERIC, - internal_transaction_in_count INTEGER, - internal_transaction_out_count INTEGER, - internal_transaction_self_count INTEGER, - internal_transaction_in_value NUMERIC, - internal_transaction_out_value NUMERIC, - internal_transaction_self_value NUMERIC, - erc20_transfer_in_count INTEGER, - erc20_transfer_out_count INTEGER, - erc20_transfer_self_count INTEGER, - nft_transfer_in_count INTEGER, - nft_transfer_out_count INTEGER, - nft_transfer_self_count INTEGER, - nft_721_transfer_in_count INTEGER, - nft_721_transfer_out_count INTEGER, - nft_721_transfer_self_count INTEGER, - nft_1155_transfer_in_count INTEGER, - nft_1155_transfer_out_count INTEGER, - nft_1155_transfer_self_count INTEGER, - contract_creation_count INTEGER, - contract_destruction_count INTEGER, - contract_operation_count INTEGER, - transaction_count INTEGER, - internal_transaction_count INTEGER, - erc20_transfer_count INTEGER, - nft_transfer_count INTEGER, - nft_721_transfer_count INTEGER, - nft_1155_transfer_count INTEGER, - tag VARCHAR, - PRIMARY KEY (address) -); - -CREATE TABLE IF NOT EXISTS af_index_token_address_daily_stats ( - address BYTEA NOT NULL, - token_holder_count INTEGER, - token_transfer_count INTEGER, - update_time TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (address) -); - -CREATE TABLE IF NOT EXISTS af_index_token_address_stats ( - address BYTEA NOT NULL, - token_holder_count INTEGER, - token_transfer_count INTEGER, - update_time TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (address) -); - -CREATE TABLE IF NOT EXISTS af_stats_na_daily_addresses ( - block_date DATE NOT NULL, - active_address_cnt BIGINT, - receiver_address_cnt BIGINT, - sender_address_cnt 
BIGINT, - total_address_cnt BIGINT, - new_address_cnt BIGINT, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS af_stats_na_daily_blocks ( - block_date DATE NOT NULL, - cnt BIGINT, - avg_size NUMERIC, - avg_gas_limit NUMERIC, - avg_gas_used NUMERIC, - total_gas_used BIGINT, - avg_gas_used_percentage NUMERIC, - avg_txn_cnt NUMERIC, - total_cnt BIGINT, - block_interval NUMERIC, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS af_stats_na_daily_bridge_transactions ( - block_date DATE NOT NULL, - deposit_cnt BIGINT, - withdraw_cnt BIGINT, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS af_stats_na_daily_tokens ( - block_date DATE NOT NULL, - erc20_active_address_cnt INTEGER, - erc20_total_transfer_cnt BIGINT, - erc721_active_address_cnt INTEGER, - erc721_total_transfer_cnt BIGINT, - erc1155_active_address_cnt INTEGER, - erc1155_total_transfer_cnt BIGINT, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS af_stats_na_daily_transactions ( - block_date DATE NOT NULL, - cnt BIGINT, - total_cnt BIGINT, - txn_error_cnt BIGINT, - avg_transaction_fee NUMERIC, - avg_gas_price NUMERIC, - max_gas_price NUMERIC, - min_gas_price NUMERIC, - avg_receipt_l1_fee NUMERIC, - max_receipt_l1_fee NUMERIC, - min_receipt_l1_fee NUMERIC, - avg_receipt_l1_gas_price NUMERIC, - max_receipt_l1_gas_price NUMERIC, - min_receipt_l1_gas_price NUMERIC, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS coin_prices ( - block_date TIMESTAMP WITHOUT TIME ZONE NOT NULL, - price NUMERIC, - PRIMARY KEY (block_date) -); - -CREATE TABLE IF NOT EXISTS scheduled_metadata ( - id SERIAL NOT NULL, - dag_id VARCHAR, - execution_date TIMESTAMP WITHOUT TIME ZONE, - last_data_timestamp TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (id) -); - -DROP TABLE IF EXISTS daily_wallet_addresses_aggregates; - -DROP TABLE IF EXISTS daily_addresses_aggregates; - -DROP TABLE IF EXISTS daily_blocks_aggregates; - -DROP TABLE IF EXISTS daily_tokens_aggregates; - -DROP TABLE IF 
EXISTS scheduled_token_count_metadata; - -DROP TABLE IF EXISTS scheduled_wallet_count_metadata; - -DROP TABLE IF EXISTS daily_contract_interacted_aggregates; - -DROP TABLE IF EXISTS daily_transactions_aggregates; - -DROP TABLE IF EXISTS statistics_wallet_addresses; - -UPDATE alembic_version SET version_num='3bd2e3099bae' WHERE alembic_version.version_num = 'bc23aa19668e'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20241121_add_failure_records_table.sql b/hemera/migrations/manual_versions/20241121_add_failure_records_table.sql deleted file mode 100644 index 9e8d7a71b..000000000 --- a/hemera/migrations/manual_versions/20241121_add_failure_records_table.sql +++ /dev/null @@ -1,19 +0,0 @@ -BEGIN; - --- Running upgrade 3bd2e3099bae -> f846e3abeb18 - -CREATE TABLE IF NOT EXISTS failure_records ( - record_id BIGSERIAL NOT NULL, - mission_sign VARCHAR, - output_types VARCHAR, - start_block_number BIGINT, - end_block_number BIGINT, - exception_stage VARCHAR, - exception JSON, - crash_time TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (record_id) -); - -UPDATE alembic_version SET version_num='f846e3abeb18' WHERE alembic_version.version_num = '3bd2e3099bae'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/manual_versions/20241128_update_table_for_0.6.0.sql b/hemera/migrations/manual_versions/20241128_update_table_for_0.6.0.sql deleted file mode 100644 index 8c02660ea..000000000 --- a/hemera/migrations/manual_versions/20241128_update_table_for_0.6.0.sql +++ /dev/null @@ -1,17 +0,0 @@ -BEGIN; - --- Running upgrade f846e3abeb18 -> 3c7ea7b95dc5 - -ALTER TABLE logs DROP CONSTRAINT logs_pkey; - -CREATE INDEX logs_pkey ON logs (transaction_hash, block_hash, log_index); - -ALTER TABLE af_holding_balance_uniswap_v3_period DROP CONSTRAINT af_holding_balance_uniswap_v3_period_pkey; - -ALTER TABLE af_holding_balance_uniswap_v3_period RENAME position_token_address TO pool_address; - -CREATE INDEX 
af_holding_balance_uniswap_v3_period_pkey ON af_holding_balance_uniswap_v3_period (period_date, protocol_id, pool_address, token_id); - -UPDATE alembic_version SET version_num='f846e3abeb18' WHERE alembic_version.version_num = '3c7ea7b95dc5'; - -COMMIT; \ No newline at end of file diff --git a/hemera/migrations/script.py.mako b/hemera/migrations/script.py.mako deleted file mode 100644 index fbc4b07dc..000000000 --- a/hemera/migrations/script.py.mako +++ /dev/null @@ -1,26 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} diff --git a/hemera/migrations/versions/20240704_base_version.py b/hemera/migrations/versions/20240704_base_version.py deleted file mode 100644 index 937422fc9..000000000 --- a/hemera/migrations/versions/20240704_base_version.py +++ /dev/null @@ -1,637 +0,0 @@ -"""base version - -Revision ID: 5e4608933f64 -Revises: -Create Date: 2024-07-04 19:05:00.122248 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "5e4608933f64" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "address_coin_balances", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address", "block_number"), - ) - op.create_table( - "address_token_balances", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=True), - sa.Column("token_type", sa.VARCHAR(), nullable=True), - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address", "token_address", "token_id", "block_number"), - ) - op.create_table( - "block_ts_mapper", - sa.Column("ts", sa.BIGINT(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("ts"), - ) - op.create_index( - "block_ts_mapper_idx", - "block_ts_mapper", - [sa.text("block_number DESC")], - unique=False, - ) - op.create_table( - "blocks", - sa.Column("hash", postgresql.BYTEA(), nullable=False), - sa.Column("number", sa.BIGINT(), 
nullable=True), - sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("parent_hash", postgresql.BYTEA(), nullable=True), - sa.Column("nonce", postgresql.BYTEA(), nullable=True), - sa.Column("gas_limit", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("base_fee_per_gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("difficulty", sa.NUMERIC(precision=38), nullable=True), - sa.Column("total_difficulty", sa.NUMERIC(precision=38), nullable=True), - sa.Column("size", sa.BIGINT(), nullable=True), - sa.Column("miner", postgresql.BYTEA(), nullable=True), - sa.Column("sha3_uncles", postgresql.BYTEA(), nullable=True), - sa.Column("transactions_root", postgresql.BYTEA(), nullable=True), - sa.Column("transactions_count", sa.BIGINT(), nullable=True), - sa.Column("state_root", postgresql.BYTEA(), nullable=True), - sa.Column("receipts_root", postgresql.BYTEA(), nullable=True), - sa.Column("extra_data", postgresql.BYTEA(), nullable=True), - sa.Column("withdrawals_root", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("hash"), - ) - op.create_index("blocks_number_index", "blocks", [sa.text("number DESC")], unique=False) - op.create_index("blocks_timestamp_index", "blocks", [sa.text("timestamp DESC")], unique=False) - op.create_table( - "contract_internal_transactions", - sa.Column("trace_id", sa.VARCHAR(), nullable=False), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("trace_type", sa.VARCHAR(), nullable=True), - 
sa.Column("call_type", sa.VARCHAR(), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("trace_address", postgresql.ARRAY(sa.INTEGER()), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("transaction_index", sa.INTEGER(), nullable=True), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("trace_id"), - ) - op.create_index( - "contract_internal_transactions_transaction_hash_idx", - "contract_internal_transactions", - ["transaction_hash"], - unique=False, - ) - op.create_index( - "internal_transactions_address_number_transaction_index", - "contract_internal_transactions", - [ - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - op.create_index( - "internal_transactions_block_timestamp_index", - "contract_internal_transactions", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "contracts", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("name", sa.VARCHAR(), nullable=True), - sa.Column("contract_creator", postgresql.BYTEA(), nullable=True), - sa.Column("creation_code", postgresql.BYTEA(), nullable=True), - sa.Column("deployed_code", postgresql.BYTEA(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", 
postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("transaction_index", sa.INTEGER(), nullable=True), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("official_website", sa.VARCHAR(), nullable=True), - sa.Column("description", sa.VARCHAR(), nullable=True), - sa.Column("email", sa.VARCHAR(), nullable=True), - sa.Column("social_list", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("is_verified", sa.BOOLEAN(), nullable=True), - sa.Column("is_proxy", sa.BOOLEAN(), nullable=True), - sa.Column("implementation_contract", postgresql.BYTEA(), nullable=True), - sa.Column("verified_implementation_contract", postgresql.BYTEA(), nullable=True), - sa.Column("proxy_standard", sa.VARCHAR(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address"), - ) - op.create_table( - "erc1155_token_holders", - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=False), - sa.Column("balance_of", sa.NUMERIC(precision=100), nullable=True), - sa.Column("latest_call_contract_time", postgresql.TIMESTAMP(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("token_address", 
"wallet_address", "token_id"), - ) - op.create_index( - "erc1155_token_holders_token_address_balance_of_index", - "erc1155_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - ) - op.create_table( - "erc1155_token_id_details", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=False), - sa.Column("token_supply", sa.NUMERIC(precision=78), nullable=True), - sa.Column("token_uri", sa.VARCHAR(), nullable=True), - sa.Column("token_uri_info", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address", "token_id"), - ) - op.create_index( - "erc1155_detail_desc_address_id_index", - "erc1155_token_id_details", - [sa.text("address DESC"), "token_id"], - unique=False, - ) - op.create_table( - "erc1155_token_transfers", - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("token_address", postgresql.BYTEA(), nullable=True), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - 
sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("transaction_hash", "log_index"), - ) - op.create_index( - "erc1155_token_transfers_address_block_number_log_index_index", - "erc1155_token_transfers", - [ - "token_address", - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("log_index DESC"), - ], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_block_timestamp_index", - "erc1155_token_transfers", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "erc20_token_holders", - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance_of", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("token_address", "wallet_address"), - ) - op.create_index( - "erc20_token_holders_token_address_balance_of_index", - "erc20_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - ) - op.create_table( - "erc20_token_transfers", - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("token_address", postgresql.BYTEA(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - 
sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("transaction_hash", "log_index"), - ) - op.create_index( - "erc20_token_transfers_address_block_number_log_index_index", - "erc20_token_transfers", - [ - "token_address", - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("log_index DESC"), - ], - unique=False, - ) - op.create_index( - "erc20_token_transfers_block_timestamp_index", - "erc20_token_transfers", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "erc721_token_holders", - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance_of", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("token_address", "wallet_address"), - ) - op.create_index( - "erc721_token_holders_token_address_balance_of_index", - "erc721_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - ) - op.create_table( - "erc721_token_id_changes", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=78), 
nullable=False), - sa.Column("token_owner", postgresql.BYTEA(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address", "token_id", "block_number"), - ) - op.create_index( - "erc721_change_address_id_number_desc_index", - "erc721_token_id_changes", - ["address", "token_id", sa.text("block_number DESC")], - unique=False, - ) - op.create_table( - "erc721_token_id_details", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=False), - sa.Column("token_owner", postgresql.BYTEA(), nullable=True), - sa.Column("token_uri", sa.VARCHAR(), nullable=True), - sa.Column("token_uri_info", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address", "token_id"), - ) - op.create_index( - "erc721_detail_owner_address_id_index", - "erc721_token_id_details", - [sa.text("token_owner DESC"), "address", "token_id"], - unique=False, - ) - op.create_table( - "erc721_token_transfers", - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - 
sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("token_address", postgresql.BYTEA(), nullable=True), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=True), - sa.Column("token_uri", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("transaction_hash", "log_index"), - ) - op.create_index( - "erc721_token_transfers_address_block_number_log_index_index", - "erc721_token_transfers", - [ - "token_address", - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("log_index DESC"), - ], - unique=False, - ) - op.create_index( - "erc721_token_transfers_block_timestamp_index", - "erc721_token_transfers", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "fix_record", - sa.Column("job_id", sa.INTEGER(), nullable=False), - sa.Column("start_block_number", sa.BIGINT(), nullable=True), - sa.Column("last_fixed_block_number", sa.BIGINT(), nullable=True), - sa.Column("remain_process", sa.INTEGER(), nullable=True), - sa.Column("job_status", sa.VARCHAR(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("job_id"), - ) - op.create_table( - "logs", - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("address", postgresql.BYTEA(), nullable=True), - sa.Column("data", postgresql.BYTEA(), nullable=True), - 
sa.Column("topic0", postgresql.BYTEA(), nullable=True), - sa.Column("topic1", postgresql.BYTEA(), nullable=True), - sa.Column("topic2", postgresql.BYTEA(), nullable=True), - sa.Column("topic3", postgresql.BYTEA(), nullable=True), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("log_index", "transaction_hash"), - ) - op.create_index( - "logs_address_block_number_log_index_index", - "logs", - ["address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "logs_block_timestamp_index", - "logs", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "sync_record", - sa.Column("mission_type", sa.VARCHAR(), nullable=False), - sa.Column("entity_types", sa.INTEGER(), nullable=False), - sa.Column("last_block_number", sa.BIGINT(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("mission_type", "entity_types"), - ) - op.create_table( - "tokens", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("name", sa.VARCHAR(), nullable=True), - sa.Column("symbol", sa.VARCHAR(), nullable=True), - sa.Column("total_supply", sa.NUMERIC(precision=100), nullable=True), - sa.Column("decimals", sa.NUMERIC(precision=100), nullable=True), - sa.Column("token_type", sa.VARCHAR(), nullable=True), - sa.Column("holder_count", sa.INTEGER(), 
nullable=True), - sa.Column("transfer_count", sa.INTEGER(), nullable=True), - sa.Column("icon_url", sa.VARCHAR(), nullable=True), - sa.Column("urls", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("volume_24h", sa.NUMERIC(precision=38, scale=2), nullable=True), - sa.Column("price", sa.NUMERIC(precision=38, scale=6), nullable=True), - sa.Column("previous_price", sa.NUMERIC(precision=38, scale=6), nullable=True), - sa.Column("market_cap", sa.NUMERIC(precision=38, scale=2), nullable=True), - sa.Column("on_chain_market_cap", sa.NUMERIC(precision=38, scale=2), nullable=True), - sa.Column("is_verified", sa.BOOLEAN(), nullable=True), - sa.Column("cmc_id", sa.INTEGER(), nullable=True), - sa.Column("cmc_slug", sa.VARCHAR(), nullable=True), - sa.Column("gecko_id", sa.VARCHAR(), nullable=True), - sa.Column("description", sa.VARCHAR(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - op.create_index("tokens_symbol_index", "tokens", ["symbol"], unique=False) - op.create_index("tokens_type_index", "tokens", ["token_type"], unique=False) - op.create_table( - "traces", - sa.Column("trace_id", sa.VARCHAR(), nullable=False), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("input", postgresql.BYTEA(), nullable=True), - sa.Column("output", postgresql.BYTEA(), nullable=True), - sa.Column("trace_type", sa.VARCHAR(), nullable=True), - sa.Column("call_type", sa.VARCHAR(), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("subtraces", sa.INTEGER(), nullable=True), - sa.Column("trace_address", 
postgresql.ARRAY(sa.INTEGER()), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("transaction_index", sa.INTEGER(), nullable=True), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("trace_id"), - ) - op.create_index( - "traces_address_block_timestamp_index", - "traces", - ["from_address", "to_address", sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index("traces_transaction_hash_index", "traces", ["transaction_hash"], unique=False) - op.create_table( - "transactions", - sa.Column("hash", postgresql.BYTEA(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=True), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("transaction_type", sa.INTEGER(), nullable=True), - sa.Column("input", postgresql.BYTEA(), nullable=True), - sa.Column("nonce", sa.INTEGER(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_price", sa.NUMERIC(precision=100), nullable=True), - sa.Column("max_fee_per_gas", sa.NUMERIC(precision=100), nullable=True), - 
sa.Column("max_priority_fee_per_gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_root", postgresql.BYTEA(), nullable=True), - sa.Column("receipt_status", sa.INTEGER(), nullable=True), - sa.Column("receipt_gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_cumulative_gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_effective_gas_price", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_l1_fee", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_l1_fee_scalar", sa.NUMERIC(precision=100, scale=18), nullable=True), - sa.Column("receipt_l1_gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_l1_gas_price", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_blob_gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_blob_gas_price", sa.NUMERIC(precision=100), nullable=True), - sa.Column("blob_versioned_hashes", postgresql.ARRAY(postgresql.BYTEA()), nullable=True), - sa.Column("receipt_contract_address", postgresql.BYTEA(), nullable=True), - sa.Column("exist_error", sa.BOOLEAN(), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("revert_reason", sa.TEXT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("hash"), - ) - op.create_index( - "transactions_address_block_number_transaction_idx", - "transactions", - [ - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - op.create_index( - "transactions_block_timestamp_block_number_index", - "transactions", - [sa.text("block_timestamp DESC"), sa.text("block_number DESC")], - unique=False, - ) - # ### end 
Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index("transactions_block_timestamp_block_number_index", table_name="transactions") - op.drop_index("transactions_address_block_number_transaction_idx", table_name="transactions") - op.drop_table("transactions") - op.drop_index("traces_transaction_hash_index", table_name="traces") - op.drop_index("traces_address_block_timestamp_index", table_name="traces") - op.drop_table("traces") - op.drop_index("tokens_type_index", table_name="tokens") - op.drop_index("tokens_symbol_index", table_name="tokens") - op.drop_table("tokens") - op.drop_table("sync_record") - op.drop_index("logs_block_timestamp_index", table_name="logs") - op.drop_index("logs_address_block_number_log_index_index", table_name="logs") - op.drop_table("logs") - op.drop_table("fix_record") - op.drop_index( - "erc721_token_transfers_block_timestamp_index", - table_name="erc721_token_transfers", - ) - op.drop_index( - "erc721_token_transfers_address_block_number_log_index_index", - table_name="erc721_token_transfers", - ) - op.drop_table("erc721_token_transfers") - op.drop_index("erc721_detail_owner_address_id_index", table_name="erc721_token_id_details") - op.drop_table("erc721_token_id_details") - op.drop_index( - "erc721_change_address_id_number_desc_index", - table_name="erc721_token_id_changes", - ) - op.drop_table("erc721_token_id_changes") - op.drop_index( - "erc721_token_holders_token_address_balance_of_index", - table_name="erc721_token_holders", - ) - op.drop_table("erc721_token_holders") - op.drop_index( - "erc20_token_transfers_block_timestamp_index", - table_name="erc20_token_transfers", - ) - op.drop_index( - "erc20_token_transfers_address_block_number_log_index_index", - table_name="erc20_token_transfers", - ) - op.drop_table("erc20_token_transfers") - op.drop_index( - "erc20_token_holders_token_address_balance_of_index", - table_name="erc20_token_holders", - ) - 
op.drop_table("erc20_token_holders") - op.drop_index( - "erc1155_token_transfers_block_timestamp_index", - table_name="erc1155_token_transfers", - ) - op.drop_index( - "erc1155_token_transfers_address_block_number_log_index_index", - table_name="erc1155_token_transfers", - ) - op.drop_table("erc1155_token_transfers") - op.drop_index("erc1155_detail_desc_address_id_index", table_name="erc1155_token_id_details") - op.drop_table("erc1155_token_id_details") - op.drop_index( - "erc1155_token_holders_token_address_balance_of_index", - table_name="erc1155_token_holders", - ) - op.drop_table("erc1155_token_holders") - op.drop_table("contracts") - op.drop_index( - "internal_transactions_block_timestamp_index", - table_name="contract_internal_transactions", - ) - op.drop_index( - "internal_transactions_address_number_transaction_index", - table_name="contract_internal_transactions", - ) - op.drop_index( - "contract_internal_transactions_transaction_hash_idx", - table_name="contract_internal_transactions", - ) - op.drop_table("contract_internal_transactions") - op.drop_index("blocks_timestamp_index", table_name="blocks") - op.drop_index("blocks_number_index", table_name="blocks") - op.drop_table("blocks") - op.drop_index("block_ts_mapper_idx", table_name="block_ts_mapper") - op.drop_table("block_ts_mapper") - op.drop_table("address_token_balances") - op.drop_table("address_coin_balances") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240708_tokens_table_add_column_block_number.py b/hemera/migrations/versions/20240708_tokens_table_add_column_block_number.py deleted file mode 100644 index 2fde3feaf..000000000 --- a/hemera/migrations/versions/20240708_tokens_table_add_column_block_number.py +++ /dev/null @@ -1,30 +0,0 @@ -"""tokens table add column 'block_number' - -Revision ID: 8a915490914a -Revises: 5e4608933f64 -Create Date: 2024-07-08 21:57:41.134980 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op - 
-# revision identifiers, used by Alembic. -revision: str = "8a915490914a" -down_revision: Union[str, None] = "5e4608933f64" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.add_column("tokens", sa.Column("block_number", sa.BIGINT(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column("tokens", "block_number") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240716_add_api_server_table.py b/hemera/migrations/versions/20240716_add_api_server_table.py deleted file mode 100644 index d9440e6a2..000000000 --- a/hemera/migrations/versions/20240716_add_api_server_table.py +++ /dev/null @@ -1,145 +0,0 @@ -"""add api server table - -Revision ID: b15f744e8582 -Revises: 8a915490914a -Create Date: 2024-07-16 14:16:26.060954 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "b15f744e8582" -down_revision: Union[str, None] = "8a915490914a" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "daily_addresses_aggregates", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("active_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("receiver_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("sender_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("total_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("new_address_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "daily_blocks_aggregates", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("cnt", sa.BIGINT(), nullable=True), - sa.Column("avg_size", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_limit", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("total_gas_used", sa.BIGINT(), nullable=True), - sa.Column("avg_gas_used_percentage", sa.NUMERIC(), nullable=True), - sa.Column("avg_txn_cnt", sa.NUMERIC(), nullable=True), - sa.Column("total_cnt", sa.BIGINT(), nullable=True), - sa.Column("block_interval", sa.NUMERIC(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "daily_tokens_aggregates", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("erc20_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.Column("erc721_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.Column("erc1155_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "daily_transactions_aggregates", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("cnt", sa.BIGINT(), nullable=True), - sa.Column("total_cnt", sa.BIGINT(), nullable=True), - sa.Column("txn_error_cnt", sa.BIGINT(), nullable=True), - 
sa.Column("avg_transaction_fee", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("max_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("min_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_fee_scalar", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_fee_scalar", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_fee_scalar", sa.NUMERIC(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - ) - op.create_table( - "scheduled_token_count_metadata", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", sa.DateTime(), nullable=True), - sa.Column("last_data_timestamp", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "scheduled_wallet_count_metadata", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", sa.DateTime(), nullable=True), - sa.Column("last_data_timestamp", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "statistics_wallet_addresses", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), nullable=True), - 
sa.Column("txn_in_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_cnt", sa.INTEGER(), nullable=True), - sa.Column("deposit_cnt", sa.INTEGER(), nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), nullable=True), - sa.Column("tag", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - op.create_table( - "wallet_addresses", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("ens_name", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("scheduled_wallet_count_metadata") - op.drop_table("scheduled_token_count_metadata") - op.drop_table("daily_transactions_aggregates") - op.drop_table("daily_tokens_aggregates") - op.drop_table("daily_blocks_aggregates") - op.drop_table("daily_addresses_aggregates") - op.drop_table("wallet_addresses") - op.drop_table("statistics_wallet_addresses") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240725_update_index_table_optimize.py b/hemera/migrations/versions/20240725_update_index_table_optimize.py deleted file mode 100644 index 472a55cbd..000000000 --- a/hemera/migrations/versions/20240725_update_index_table_optimize.py +++ /dev/null @@ -1,865 +0,0 @@ -"""update index & table optimize - -Revision ID: 9f2cf385645f -Revises: b15f744e8582 -Create Date: 2024-07-25 10:53:54.958874 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "9f2cf385645f" -down_revision: Union[str, None] = "b15f744e8582" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "address_current_token_balances", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=78), nullable=True), - sa.Column("token_type", sa.VARCHAR(), nullable=True), - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("address", "token_address", "token_id"), - ) - op.create_index( - "current_token_balances_token_address_balance_of_index", - "address_current_token_balances", - ["token_address", sa.text("balance DESC")], - unique=False, - ) - op.create_index( - "current_token_balances_token_address_id_balance_of_index", - "address_current_token_balances", - ["token_address", "token_id", sa.text("balance DESC")], - unique=False, - ) - op.drop_index( - "erc721_token_holders_token_address_balance_of_index", - table_name="erc721_token_holders", - ) - op.drop_table("erc721_token_holders") - op.drop_index( - "erc20_token_holders_token_address_balance_of_index", - table_name="erc20_token_holders", - ) - op.drop_table("erc20_token_holders") - op.drop_table("wallet_addresses") - op.drop_index( - "erc1155_token_holders_token_address_balance_of_index", - table_name="erc1155_token_holders", - ) - op.drop_table("erc1155_token_holders") - op.create_index( - "coin_balance_address_number_desc_index", - "address_coin_balances", - [sa.text("address DESC"), sa.text("block_number DESC")], - unique=False, - ) - op.create_index( - "token_balance_address_id_number_index", - "address_token_balances", - [ - 
"address", - "token_address", - sa.text("token_id DESC"), - sa.text("block_number DESC"), - ], - unique=False, - ) - op.add_column("blocks", sa.Column("blob_gas_used", sa.NUMERIC(precision=100), nullable=True)) - op.add_column("blocks", sa.Column("excess_blob_gas", sa.NUMERIC(precision=100), nullable=True)) - op.add_column("blocks", sa.Column("traces_count", sa.BIGINT(), nullable=True)) - op.add_column("blocks", sa.Column("internal_transactions_count", sa.BIGINT(), nullable=True)) - op.create_index( - "blocks_hash_unique_when_not_reorg", - "blocks", - ["hash"], - unique=True, - postgresql_where=sa.text("reorg = false"), - ) - op.create_index( - "blocks_number_unique_when_not_reorg", - "blocks", - ["number"], - unique=True, - postgresql_where=sa.text("reorg = false"), - ) - op.drop_index( - "internal_transactions_address_number_transaction_index", - table_name="contract_internal_transactions", - ) - op.drop_index( - "internal_transactions_block_timestamp_index", - table_name="contract_internal_transactions", - ) - op.create_index( - "internal_transactions_block_number_index", - "contract_internal_transactions", - [sa.text("block_number DESC")], - unique=False, - ) - op.create_index( - "internal_transactions_from_address_number_transaction_index", - "contract_internal_transactions", - [ - "from_address", - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - op.create_index( - "internal_transactions_number_transaction_index", - "contract_internal_transactions", - [sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - ) - op.create_index( - "internal_transactions_to_address_number_transaction_index", - "contract_internal_transactions", - ["to_address", sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - ) - op.drop_index("erc1155_detail_desc_address_id_index", table_name="erc1155_token_id_details") - op.drop_constraint("erc1155_token_id_details_pkey", 
"erc1155_token_id_details", type_="primary") - op.alter_column("erc1155_token_id_details", "address", new_column_name="token_address") - op.create_index( - "erc1155_detail_desc_address_id_index", - "erc1155_token_id_details", - [sa.text("token_address DESC"), "token_id"], - unique=False, - ) - op.create_primary_key( - "erc1155_token_id_details_pkey", - "erc1155_token_id_details", - ["token_address", "token_id"], - ) - op.alter_column( - "erc1155_token_transfers", - "token_id", - existing_type=sa.NUMERIC(precision=78, scale=0), - nullable=False, - ) - op.alter_column( - "erc1155_token_transfers", - "block_hash", - existing_type=postgresql.BYTEA(), - nullable=False, - ) - op.drop_index( - "erc1155_token_transfers_address_block_number_log_index_index", - table_name="erc1155_token_transfers", - ) - op.drop_index( - "erc1155_token_transfers_block_timestamp_index", - table_name="erc1155_token_transfers", - ) - op.create_index( - "erc1155_token_transfers_from_address_number_log_index_index", - "erc1155_token_transfers", - ["from_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_number_log_index", - "erc1155_token_transfers", - [sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_to_address_number_log_index_index", - "erc1155_token_transfers", - ["to_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_token_address_from_index", - "erc1155_token_transfers", - ["token_address", "from_address"], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_token_address_id_index", - "erc1155_token_transfers", - ["token_address", "token_id"], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_token_address_number_log_index_index", - "erc1155_token_transfers", - ["token_address", sa.text("block_number DESC"), 
sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_token_address_to_index", - "erc1155_token_transfers", - ["token_address", "to_address"], - unique=False, - ) - op.drop_constraint("erc1155_token_transfers_pkey", "erc1155_token_transfers", type_="primary") - op.create_primary_key( - "erc1155_token_transfers_pkey", - "erc1155_token_transfers", - ["transaction_hash", "block_hash", "log_index", "token_id"], - ) - op.alter_column( - "erc20_token_transfers", - "block_hash", - existing_type=postgresql.BYTEA(), - nullable=False, - ) - op.drop_index( - "erc20_token_transfers_address_block_number_log_index_index", - table_name="erc20_token_transfers", - ) - op.drop_index( - "erc20_token_transfers_block_timestamp_index", - table_name="erc20_token_transfers", - ) - op.create_index( - "erc20_token_transfers_from_address_number_log_index_index", - "erc20_token_transfers", - ["from_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc20_token_transfers_number_log_index", - "erc20_token_transfers", - [sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc20_token_transfers_to_address_number_log_index_index", - "erc20_token_transfers", - ["to_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc20_token_transfers_token_address_from_index_index", - "erc20_token_transfers", - ["token_address", "from_address"], - unique=False, - ) - op.create_index( - "erc20_token_transfers_token_address_number_log_index_index", - "erc20_token_transfers", - ["token_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc20_token_transfers_token_address_to_index_index", - "erc20_token_transfers", - ["token_address", "to_address"], - unique=False, - ) - - op.drop_index( - "erc721_change_address_id_number_desc_index", - 
table_name="erc721_token_id_changes", - ) - op.drop_constraint("erc721_token_id_changes_pkey", "erc721_token_id_changes", type_="primary") - op.alter_column("erc721_token_id_changes", "address", new_column_name="token_address") - op.create_index( - "erc721_change_address_id_number_desc_index", - "erc721_token_id_changes", - ["token_address", "token_id", sa.text("block_number DESC")], - unique=False, - ) - op.create_primary_key( - "erc721_token_id_changes_pkey", - "erc721_token_id_changes", - ["token_address", "token_id", "block_number"], - ) - - op.drop_index("erc721_detail_owner_address_id_index", table_name="erc721_token_id_details") - op.drop_constraint("erc721_token_id_details_pkey", "erc721_token_id_details", type_="primary") - op.alter_column("erc721_token_id_details", "address", new_column_name="token_address") - op.create_index( - "erc721_detail_owner_address_id_index", - "erc721_token_id_details", - [sa.text("token_owner DESC"), "token_address", "token_id"], - unique=False, - ) - op.create_primary_key( - "erc721_token_id_details_pkey", - "erc721_token_id_details", - ["token_address", "token_id"], - ) - - op.alter_column( - "erc721_token_transfers", - "block_hash", - existing_type=postgresql.BYTEA(), - nullable=False, - ) - op.drop_index( - "erc721_token_transfers_address_block_number_log_index_index", - table_name="erc721_token_transfers", - ) - op.create_index( - "erc721_token_transfers_from_address_number_log_index_index", - "erc721_token_transfers", - ["from_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc721_token_transfers_number_log_index", - "erc721_token_transfers", - [sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc721_token_transfers_to_address_number_log_index_index", - "erc721_token_transfers", - ["to_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - 
"erc721_token_transfers_token_address_from_index", - "erc721_token_transfers", - ["token_address", "from_address"], - unique=False, - ) - op.create_index( - "erc721_token_transfers_token_address_id_index", - "erc721_token_transfers", - ["token_address", "token_id"], - unique=False, - ) - op.create_index( - "erc721_token_transfers_token_address_number_log_index_index", - "erc721_token_transfers", - ["token_address", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "erc721_token_transfers_token_address_to_index", - "erc721_token_transfers", - ["token_address", "to_address"], - unique=False, - ) - op.drop_column("erc721_token_transfers", "token_uri") - op.alter_column("logs", "block_hash", existing_type=postgresql.BYTEA(), nullable=False) - op.create_index( - "logs_address_topic_0_number_log_index_index", - "logs", - ["address", "topic0", sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index( - "logs_block_number_log_index_index", - "logs", - [sa.text("block_number DESC"), sa.text("log_index DESC")], - unique=False, - ) - op.create_index("tokens_name_index", "tokens", ["name"], unique=False) - op.create_index( - "tokens_type_holders_index", - "tokens", - ["token_type", sa.text("holder_count DESC")], - unique=False, - ) - op.create_index( - "tokens_type_on_chain_market_cap_index", - "tokens", - ["token_type", sa.text("on_chain_market_cap DESC")], - unique=False, - ) - op.drop_index("traces_address_block_timestamp_index", table_name="traces") - op.create_index( - "traces_block_number_index", - "traces", - [sa.text("block_number DESC")], - unique=False, - ) - op.create_index( - "traces_from_address_block_number_index", - "traces", - ["from_address", sa.text("block_number DESC")], - unique=False, - ) - op.create_index( - "traces_to_address_block_number_index", - "traces", - ["to_address", sa.text("block_number DESC")], - unique=False, - ) - op.add_column( - "transactions", - 
sa.Column( - "method_id", - sa.VARCHAR(), - sa.Computed( - "substr(input :: pg_catalog.varchar, 3, 8)", - ), - nullable=True, - ), - ) - op.drop_index("transactions_address_block_number_transaction_idx", table_name="transactions") - op.drop_index("transactions_block_timestamp_block_number_index", table_name="transactions") - op.create_index( - "transactions_block_number_transaction_index", - "transactions", - [sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - ) - op.create_index( - "transactions_block_timestamp_index", - "transactions", - ["block_timestamp"], - unique=False, - ) - op.create_index( - "transactions_from_address_block_number_transaction_idx", - "transactions", - [ - sa.text("from_address ASC"), - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - op.create_index( - "transactions_to_address_block_number_transaction_idx", - "transactions", - [ - sa.text("to_address ASC"), - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index( - "transactions_to_address_block_number_transaction_idx", - table_name="transactions", - ) - op.drop_index( - "transactions_from_address_block_number_transaction_idx", - table_name="transactions", - ) - op.drop_index("transactions_block_timestamp_index", table_name="transactions") - op.drop_index("transactions_block_number_transaction_index", table_name="transactions") - op.create_index( - "transactions_block_timestamp_block_number_index", - "transactions", - [sa.text("block_timestamp DESC"), sa.text("block_number DESC")], - unique=False, - ) - op.create_index( - "transactions_address_block_number_transaction_idx", - "transactions", - [ - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - op.drop_column("transactions", "method_id") - op.drop_index("traces_to_address_block_number_index", table_name="traces") - op.drop_index("traces_from_address_block_number_index", table_name="traces") - op.drop_index("traces_block_number_index", table_name="traces") - op.create_index( - "traces_address_block_timestamp_index", - "traces", - ["from_address", "to_address", sa.text("block_timestamp DESC")], - unique=False, - ) - op.drop_index("tokens_type_on_chain_market_cap_index", table_name="tokens") - op.drop_index("tokens_type_holders_index", table_name="tokens") - op.drop_index("tokens_name_index", table_name="tokens") - op.drop_index("logs_block_number_log_index_index", table_name="logs") - op.drop_index("logs_address_topic_0_number_log_index_index", table_name="logs") - op.alter_column("logs", "block_hash", existing_type=postgresql.BYTEA(), nullable=True) - op.add_column( - "erc721_token_transfers", - sa.Column( - "token_uri", - postgresql.JSONB(astext_type=sa.Text()), - autoincrement=False, - nullable=True, - ), - ) - op.drop_index( - "erc721_token_transfers_token_address_to_index", - table_name="erc721_token_transfers", - ) - op.drop_index( - 
"erc721_token_transfers_token_address_number_log_index_index", - table_name="erc721_token_transfers", - ) - op.drop_index( - "erc721_token_transfers_token_address_id_index", - table_name="erc721_token_transfers", - ) - op.drop_index( - "erc721_token_transfers_token_address_from_index", - table_name="erc721_token_transfers", - ) - op.drop_index( - "erc721_token_transfers_to_address_number_log_index_index", - table_name="erc721_token_transfers", - ) - op.drop_index("erc721_token_transfers_number_log_index", table_name="erc721_token_transfers") - op.drop_index( - "erc721_token_transfers_from_address_number_log_index_index", - table_name="erc721_token_transfers", - ) - op.create_index( - "erc721_token_transfers_address_block_number_log_index_index", - "erc721_token_transfers", - [ - "token_address", - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("log_index DESC"), - ], - unique=False, - ) - op.alter_column( - "erc721_token_transfers", - "block_hash", - existing_type=postgresql.BYTEA(), - nullable=True, - ) - - op.drop_index("erc721_detail_owner_address_id_index", table_name="erc721_token_id_details") - op.drop_constraint("erc721_token_id_details_pkey", "erc721_token_id_details", type_="primary") - op.alter_column("erc721_token_id_details", "token_address", new_column_name="address") - op.create_index( - "erc721_detail_owner_address_id_index", - "erc721_token_id_details", - [sa.text("token_owner DESC"), "address", "token_id"], - unique=False, - ) - op.create_primary_key( - "erc721_token_id_details_pkey", - "erc721_token_id_details", - ["address", "token_id"], - ) - - op.drop_index( - "erc721_change_address_id_number_desc_index", - table_name="erc721_token_id_changes", - ) - op.drop_constraint("erc721_token_id_changes_pkey", "erc721_token_id_changes", type_="primary") - op.alter_column("erc721_token_id_changes", "token_address", new_column_name="address") - op.create_index( - "erc721_change_address_id_number_desc_index", - 
"erc721_token_id_changes", - ["address", "token_id", sa.text("block_number DESC")], - unique=False, - ) - op.create_primary_key( - "erc721_token_id_changes_pkey", - "erc721_token_id_changes", - ["address", "token_id", "block_number"], - ) - - op.drop_index( - "erc20_token_transfers_token_address_to_index_index", - table_name="erc20_token_transfers", - ) - op.drop_index( - "erc20_token_transfers_token_address_number_log_index_index", - table_name="erc20_token_transfers", - ) - op.drop_index( - "erc20_token_transfers_token_address_from_index_index", - table_name="erc20_token_transfers", - ) - op.drop_index( - "erc20_token_transfers_to_address_number_log_index_index", - table_name="erc20_token_transfers", - ) - op.drop_index("erc20_token_transfers_number_log_index", table_name="erc20_token_transfers") - op.drop_index( - "erc20_token_transfers_from_address_number_log_index_index", - table_name="erc20_token_transfers", - ) - op.create_index( - "erc20_token_transfers_block_timestamp_index", - "erc20_token_transfers", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "erc20_token_transfers_address_block_number_log_index_index", - "erc20_token_transfers", - [ - "token_address", - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("log_index DESC"), - ], - unique=False, - ) - op.alter_column( - "erc20_token_transfers", - "block_hash", - existing_type=postgresql.BYTEA(), - nullable=True, - ) - op.drop_index( - "erc1155_token_transfers_token_address_to_index", - table_name="erc1155_token_transfers", - ) - op.drop_index( - "erc1155_token_transfers_token_address_number_log_index_index", - table_name="erc1155_token_transfers", - ) - op.drop_index( - "erc1155_token_transfers_token_address_id_index", - table_name="erc1155_token_transfers", - ) - op.drop_index( - "erc1155_token_transfers_token_address_from_index", - table_name="erc1155_token_transfers", - ) - op.drop_index( - 
"erc1155_token_transfers_to_address_number_log_index_index", - table_name="erc1155_token_transfers", - ) - op.drop_index("erc1155_token_transfers_number_log_index", table_name="erc1155_token_transfers") - op.drop_index( - "erc1155_token_transfers_from_address_number_log_index_index", - table_name="erc1155_token_transfers", - ) - op.create_index( - "erc1155_token_transfers_block_timestamp_index", - "erc1155_token_transfers", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "erc1155_token_transfers_address_block_number_log_index_index", - "erc1155_token_transfers", - [ - "token_address", - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("log_index DESC"), - ], - unique=False, - ) - op.alter_column( - "erc1155_token_transfers", - "block_hash", - existing_type=postgresql.BYTEA(), - nullable=True, - ) - op.alter_column( - "erc1155_token_transfers", - "token_id", - existing_type=sa.NUMERIC(precision=78, scale=0), - nullable=True, - ) - op.drop_constraint("erc1155_token_transfers_pkey", "erc1155_token_transfers", type_="primary") - op.create_primary_key( - "erc1155_token_transfers_pkey", - "erc1155_token_transfers", - ["transaction_hash", "log_index"], - ) - op.drop_index("erc1155_detail_desc_address_id_index", table_name="erc1155_token_id_details") - op.drop_constraint("erc1155_token_id_details_pkey", "erc1155_token_id_details", type_="primary") - op.alter_column("erc1155_token_id_details", "token_address", new_column_name="address") - op.create_index( - "erc1155_detail_desc_address_id_index", - "erc1155_token_id_details", - [sa.text("address DESC"), "token_id"], - unique=False, - ) - op.create_primary_key( - "erc1155_token_id_details_pkey", - "erc1155_token_id_details", - ["address", "token_id"], - ) - op.drop_index( - "internal_transactions_to_address_number_transaction_index", - table_name="contract_internal_transactions", - ) - op.drop_index( - "internal_transactions_number_transaction_index", - 
table_name="contract_internal_transactions", - ) - op.drop_index( - "internal_transactions_from_address_number_transaction_index", - table_name="contract_internal_transactions", - ) - op.drop_index( - "internal_transactions_block_number_index", - table_name="contract_internal_transactions", - ) - op.create_index( - "internal_transactions_block_timestamp_index", - "contract_internal_transactions", - [sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "internal_transactions_address_number_transaction_index", - "contract_internal_transactions", - [ - "from_address", - "to_address", - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - ) - op.drop_index( - "blocks_number_unique_when_not_reorg", - table_name="blocks", - postgresql_where=sa.text("reorg = false"), - ) - op.drop_index( - "blocks_hash_unique_when_not_reorg", - table_name="blocks", - postgresql_where=sa.text("reorg = false"), - ) - op.drop_column("blocks", "internal_transactions_count") - op.drop_column("blocks", "traces_count") - op.drop_column("blocks", "excess_blob_gas") - op.drop_column("blocks", "blob_gas_used") - op.drop_index("token_balance_address_id_number_index", table_name="address_token_balances") - op.drop_index("coin_balance_address_number_desc_index", table_name="address_coin_balances") - op.create_table( - "erc1155_token_holders", - sa.Column("token_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column( - "token_id", - sa.NUMERIC(precision=78, scale=0), - autoincrement=False, - nullable=False, - ), - sa.Column( - "balance_of", - sa.NUMERIC(precision=100, scale=0), - autoincrement=False, - nullable=True, - ), - sa.Column( - "latest_call_contract_time", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - sa.Column("block_number", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column( - 
"block_timestamp", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - sa.Column("create_time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint( - "token_address", - "wallet_address", - "token_id", - name="erc1155_token_holders_pkey", - ), - ) - op.create_index( - "erc1155_token_holders_token_address_balance_of_index", - "erc1155_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - ) - op.create_table( - "wallet_addresses", - sa.Column("address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("ens_name", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("address", name="wallet_addresses_pkey"), - ) - op.create_table( - "erc20_token_holders", - sa.Column("token_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column( - "balance_of", - sa.NUMERIC(precision=100, scale=0), - autoincrement=False, - nullable=True, - ), - sa.Column("block_number", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column( - "block_timestamp", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - sa.Column("create_time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("token_address", "wallet_address", name="erc20_token_holders_pkey"), - ) - op.create_index( - "erc20_token_holders_token_address_balance_of_index", - "erc20_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - ) - op.create_table( - 
"erc721_token_holders", - sa.Column("token_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column( - "balance_of", - sa.NUMERIC(precision=100, scale=0), - autoincrement=False, - nullable=True, - ), - sa.Column("block_number", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column( - "block_timestamp", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - sa.Column("create_time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("token_address", "wallet_address", name="erc721_token_holders_pkey"), - ) - op.create_index( - "erc721_token_holders_token_address_balance_of_index", - "erc721_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - ) - op.drop_index( - "current_token_balances_token_address_id_balance_of_index", - table_name="address_current_token_balances", - ) - op.drop_index( - "current_token_balances_token_address_balance_of_index", - table_name="address_current_token_balances", - ) - op.drop_table("address_current_token_balances") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240726_modify_sync_record_table.py b/hemera/migrations/versions/20240726_modify_sync_record_table.py deleted file mode 100644 index 4b60effcb..000000000 --- a/hemera/migrations/versions/20240726_modify_sync_record_table.py +++ /dev/null @@ -1,46 +0,0 @@ -"""modify sync_record table - -Revision ID: 0b922153e040 -Revises: 9f2cf385645f -Create Date: 2024-07-26 19:35:46.987343 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = "0b922153e040" -down_revision: Union[str, None] = "9f2cf385645f" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("sync_record") - op.create_table( - "sync_record", - sa.Column("mission_sign", sa.VARCHAR(), nullable=False), - sa.Column("last_block_number", sa.BIGINT(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("mission_sign"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("sync_record") - op.create_table( - "sync_record", - sa.Column("mission_type", sa.VARCHAR(), nullable=False), - sa.Column("entity_types", sa.INTEGER(), nullable=False), - sa.Column("last_block_number", sa.BIGINT(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("mission_type", "entity_types"), - ) - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py b/hemera/migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py deleted file mode 100644 index 78308698c..000000000 --- a/hemera/migrations/versions/20240731_add_feature_records_and_uniswap_v3_.py +++ /dev/null @@ -1,85 +0,0 @@ -"""add feature records and uniswap v3 feature - -Revision ID: 3d5ce8939570 -Revises: 0b922153e040 -Create Date: 2024-07-31 10:37:12.548772 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = "3d5ce8939570" -down_revision: Union[str, None] = "0b922153e040" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "all_feature_value_records", - sa.Column("feature_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("value", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("block_number", "feature_id", "address"), - ) - op.create_index( - "all_feature_value_records_feature_block_index", - "all_feature_value_records", - ["feature_id", sa.text("block_number DESC")], - unique=False, - ) - op.create_table( - "feature_uniswap_v3_pools", - sa.Column("nft_address", postgresql.BYTEA(), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("fee", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick_spacing", sa.NUMERIC(precision=100), nullable=True), - sa.Column("mint_block_number", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("nft_address", "pool_address"), - ) - op.create_table( - "feature_uniswap_v3_tokens", - sa.Column("nft_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - 
sa.Column("pool_address", postgresql.BYTEA(), nullable=True), - sa.Column("tick_lower", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick_upper", sa.NUMERIC(precision=100), nullable=True), - sa.Column("fee", sa.NUMERIC(precision=100), nullable=True), - sa.Column("mint_block_number", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("nft_address", "token_id"), - ) - op.create_index( - "feature_uniswap_v3_tokens_nft_index", - "feature_uniswap_v3_tokens", - ["nft_address"], - unique=False, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index("feature_uniswap_v3_tokens_nft_index", table_name="feature_uniswap_v3_tokens") - op.drop_table("feature_uniswap_v3_tokens") - op.drop_table("feature_uniswap_v3_pools") - op.drop_index( - "all_feature_value_records_feature_block_index", - table_name="all_feature_value_records", - ) - op.drop_table("all_feature_value_records") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240731_add_user_ops_table.py b/hemera/migrations/versions/20240731_add_user_ops_table.py deleted file mode 100644 index 197d3a831..000000000 --- a/hemera/migrations/versions/20240731_add_user_ops_table.py +++ /dev/null @@ -1,64 +0,0 @@ -"""add user ops table - -Revision ID: 9a1e927f02bb -Revises: 3d5ce8939570 -Create Date: 2024-07-31 13:11:10.244802 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = "9a1e927f02bb" -down_revision: Union[str, None] = "3d5ce8939570" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "user_operations_results", - sa.Column("user_op_hash", postgresql.BYTEA(), nullable=False), - sa.Column("sender", sa.VARCHAR(length=42), nullable=True), - sa.Column("paymaster", sa.VARCHAR(length=42), nullable=True), - sa.Column("nonce", sa.NUMERIC(), nullable=True), - sa.Column("status", sa.BOOLEAN(), nullable=True), - sa.Column("actual_gas_cost", sa.NUMERIC(), nullable=True), - sa.Column("actual_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("init_code", postgresql.BYTEA(), nullable=True), - sa.Column("call_data", postgresql.BYTEA(), nullable=True), - sa.Column("call_gas_limit", sa.NUMERIC(), nullable=True), - sa.Column("verification_gas_limit", sa.NUMERIC(), nullable=True), - sa.Column("pre_verification_gas", sa.NUMERIC(), nullable=True), - sa.Column("max_fee_per_gas", sa.NUMERIC(), nullable=True), - sa.Column("max_priority_fee_per_gas", sa.NUMERIC(), nullable=True), - sa.Column("paymaster_and_data", postgresql.BYTEA(), nullable=True), - sa.Column("signature", postgresql.BYTEA(), nullable=True), - sa.Column("transactions_hash", postgresql.BYTEA(), nullable=True), - sa.Column("transactions_index", sa.INTEGER(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("bundler", sa.VARCHAR(length=42), nullable=True), - sa.Column("start_log_index", sa.INTEGER(), nullable=True), - sa.Column("end_log_index", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("user_op_hash"), - ) - op.create_index( - "transactions_hash_index", - "user_operations_results", - ["transactions_hash"], - unique=False, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - 
# ### commands auto generated by Alembic - please adjust! ### - op.drop_index("transactions_hash_index", table_name="user_operations_results") - op.drop_table("user_operations_results") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240802_add_exception_recorder_table.py b/hemera/migrations/versions/20240802_add_exception_recorder_table.py deleted file mode 100644 index 7f655b18a..000000000 --- a/hemera/migrations/versions/20240802_add_exception_recorder_table.py +++ /dev/null @@ -1,42 +0,0 @@ -"""add exception recorder table - -Revision ID: 040e5251f45d -Revises: 9a1e927f02bb -Create Date: 2024-08-02 17:57:31.418456 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "040e5251f45d" -down_revision: Union[str, None] = "9a1e927f02bb" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "exception_records", - sa.Column("id", sa.BIGINT(), autoincrement=True, nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("dataclass", sa.VARCHAR(), nullable=True), - sa.Column("level", sa.VARCHAR(), nullable=True), - sa.Column("message_type", sa.VARCHAR(), nullable=True), - sa.Column("message", sa.VARCHAR(), nullable=True), - sa.Column("exception_env", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("record_time", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("exception_records") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240802_add_l2_chain_table.py b/hemera/migrations/versions/20240802_add_l2_chain_table.py deleted file mode 100644 index d2439e763..000000000 --- a/hemera/migrations/versions/20240802_add_l2_chain_table.py +++ /dev/null @@ -1,284 +0,0 @@ -"""Describe your changes here - -Revision ID: e3a3e2114b9c -Revises: 040e5251f45d -Create Date: 2024-08-02 11:00:08.496753 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "e3a3e2114b9c" -down_revision: Union[str, None] = "040e5251f45d" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "arbitrum_state_batches", - sa.Column("node_num", sa.INTEGER(), nullable=False), - sa.Column("create_l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("create_l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_l1_block_hash", sa.VARCHAR(), nullable=True), - sa.Column("create_l1_transaction_hash", sa.VARCHAR(), nullable=True), - sa.Column("l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_block_hash", sa.VARCHAR(), nullable=True), - sa.Column("l1_transaction_hash", sa.VARCHAR(), nullable=True), - sa.Column("parent_node_hash", sa.VARCHAR(), nullable=True), - sa.Column("node_hash", sa.VARCHAR(), nullable=True), - sa.Column("block_hash", sa.VARCHAR(), nullable=True), - sa.Column("send_root", sa.VARCHAR(), nullable=True), - sa.Column("start_block_number", sa.INTEGER(), nullable=True), - sa.Column("end_block_number", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), 
nullable=True), - sa.Column("block_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("node_num"), - ) - op.create_table( - "arbitrum_transaction_batches", - sa.Column("batch_index", sa.INTEGER(), nullable=False), - sa.Column("l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_block_hash", sa.VARCHAR(), nullable=True), - sa.Column("l1_transaction_hash", sa.VARCHAR(), nullable=True), - sa.Column("batch_root", sa.VARCHAR(), nullable=True), - sa.Column("start_block_number", sa.INTEGER(), nullable=True), - sa.Column("end_block_number", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("block_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("batch_index"), - ) - op.create_table( - "bridge_tokens", - sa.Column("l1_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("l2_token_address", postgresql.BYTEA(), nullable=False), - sa.PrimaryKeyConstraint("l1_token_address", "l2_token_address"), - ) - op.create_table( - "data_store_tx_mapping", - sa.Column("data_store_id", sa.INTEGER(), nullable=False), - sa.Column("index", sa.INTEGER(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=True), - sa.Column("transaction_hash", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("data_store_id", "index"), - ) - op.create_table( - "data_stores", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("store_number", sa.INTEGER(), nullable=True), - sa.Column("duration_data_store_id", sa.INTEGER(), nullable=True), - sa.Column("index", sa.INTEGER(), nullable=True), - sa.Column("data_commitment", sa.VARCHAR(), nullable=True), - sa.Column("msg_hash", sa.VARCHAR(), nullable=True), - sa.Column("init_time", postgresql.TIMESTAMP(), nullable=True), - sa.Column("expire_time", postgresql.TIMESTAMP(), nullable=True), - sa.Column("duration", sa.INTEGER(), nullable=True), - 
sa.Column("store_period_length", sa.INTEGER(), nullable=True), - sa.Column("fee", sa.INTEGER(), nullable=True), - sa.Column("confirmer", sa.VARCHAR(), nullable=True), - sa.Column("header", sa.VARCHAR(), nullable=True), - sa.Column("init_tx_hash", sa.VARCHAR(), nullable=True), - sa.Column("init_gas_used", sa.INTEGER(), nullable=True), - sa.Column("init_block_number", sa.INTEGER(), nullable=True), - sa.Column("confirmed", sa.BOOLEAN(), nullable=True), - sa.Column("signatory_record", sa.VARCHAR(), nullable=True), - sa.Column("confirm_tx_hash", sa.VARCHAR(), nullable=True), - sa.Column("confirm_gas_used", sa.INTEGER(), nullable=True), - sa.Column("batch_index", sa.INTEGER(), nullable=True), - sa.Column("tx_count", sa.INTEGER(), nullable=True), - sa.Column("block_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "l1_state_batches", - sa.Column("batch_index", sa.INTEGER(), nullable=False), - sa.Column("previous_total_elements", sa.INTEGER(), nullable=True), - sa.Column("batch_size", sa.INTEGER(), nullable=True), - sa.Column("l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_block_hash", sa.VARCHAR(), nullable=True), - sa.Column("l1_transaction_hash", sa.VARCHAR(), nullable=True), - sa.Column("extra_data", sa.VARCHAR(), nullable=True), - sa.Column("batch_root", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("batch_index"), - ) - op.create_table( - "l1_to_l2_bridge_transactions", - sa.Column("msg_hash", postgresql.BYTEA(), nullable=False), - sa.Column("version", sa.INTEGER(), nullable=True), - sa.Column("index", sa.INTEGER(), nullable=True), - sa.Column("l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l1_transaction_hash", postgresql.BYTEA(), nullable=True), - 
sa.Column("l1_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("l1_to_address", postgresql.BYTEA(), nullable=True), - sa.Column("l2_block_number", sa.INTEGER(), nullable=True), - sa.Column("l2_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l2_block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l2_transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l2_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("l2_to_address", postgresql.BYTEA(), nullable=True), - sa.Column("amount", sa.NUMERIC(precision=78), nullable=True), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("l1_token_address", postgresql.BYTEA(), nullable=True), - sa.Column("l2_token_address", postgresql.BYTEA(), nullable=True), - sa.Column("extra_info", postgresql.JSON(astext_type=sa.Text()), nullable=True), - sa.Column("_type", sa.INTEGER(), nullable=True), - sa.Column("sender", postgresql.BYTEA(), nullable=True), - sa.Column("target", postgresql.BYTEA(), nullable=True), - sa.Column("data", postgresql.BYTEA(), nullable=True), - sa.PrimaryKeyConstraint("msg_hash"), - ) - op.create_table( - "l2_to_l1_bridge_transactions", - sa.Column("msg_hash", postgresql.BYTEA(), nullable=False), - sa.Column("version", sa.INTEGER(), nullable=True), - sa.Column("index", sa.INTEGER(), nullable=True), - sa.Column("l2_block_number", sa.INTEGER(), nullable=True), - sa.Column("l2_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l2_block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l2_transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l2_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("l2_to_address", postgresql.BYTEA(), nullable=True), - sa.Column("l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_block_hash", 
postgresql.BYTEA(), nullable=True), - sa.Column("l1_transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l1_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("l1_to_address", postgresql.BYTEA(), nullable=True), - sa.Column("amount", sa.NUMERIC(precision=78), nullable=True), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("l1_token_address", postgresql.BYTEA(), nullable=True), - sa.Column("l2_token_address", postgresql.BYTEA(), nullable=True), - sa.Column("extra_info", postgresql.JSON(astext_type=sa.Text()), nullable=True), - sa.Column("_type", sa.INTEGER(), nullable=True), - sa.Column("l1_proven_transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l1_proven_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_proven_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_proven_block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("l1_proven_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("l1_proven_to_address", postgresql.BYTEA(), nullable=True), - sa.PrimaryKeyConstraint("msg_hash"), - ) - op.create_table( - "linea_batches", - sa.Column("number", sa.INTEGER(), nullable=False), - sa.Column("verify_tx_hash", sa.VARCHAR(), nullable=True), - sa.Column("verify_block_number", sa.INTEGER(), nullable=True), - sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("blocks", postgresql.ARRAY(sa.INTEGER()), nullable=True), - sa.Column("transactions", postgresql.ARRAY(sa.VARCHAR()), nullable=True), - sa.Column("last_finalized_block_number", sa.INTEGER(), nullable=True), - sa.Column("tx_count", sa.INTEGER(), nullable=True), - sa.Column("block_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("number"), - ) - op.create_table( - "mantle_batches", - sa.Column("index", sa.INTEGER(), nullable=False), - sa.Column("data_store_index", sa.INTEGER(), nullable=True), - 
sa.Column("upgrade_data_store_id", sa.INTEGER(), nullable=True), - sa.Column("data_store_id", sa.INTEGER(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("confirm_at", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("index"), - ) - op.create_table( - "op_bedrock_state_batches", - sa.Column("batch_index", sa.INTEGER(), nullable=False), - sa.Column("l1_block_number", sa.INTEGER(), nullable=True), - sa.Column("l1_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("l1_block_hash", sa.VARCHAR(), nullable=True), - sa.Column("l1_transaction_hash", sa.VARCHAR(), nullable=True), - sa.Column("start_block_number", sa.INTEGER(), nullable=True), - sa.Column("end_block_number", sa.INTEGER(), nullable=True), - sa.Column("batch_root", sa.VARCHAR(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("block_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("batch_index"), - ) - op.create_table( - "op_da_transactions", - sa.Column("receipt_blob_gas_used", sa.INTEGER(), nullable=True), - sa.Column("receipt_blob_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("blob_versioned_hashes", postgresql.ARRAY(sa.VARCHAR()), nullable=True), - sa.Column("hash", sa.VARCHAR(), nullable=False), - sa.Column("nonce", sa.INTEGER(), nullable=True), - sa.Column("transaction_index", sa.INTEGER(), nullable=True), - sa.Column("from_address", sa.VARCHAR(), nullable=True), - sa.Column("to_address", sa.VARCHAR(), nullable=True), - sa.Column("value", sa.NUMERIC(), nullable=True), - sa.Column("gas", sa.INTEGER(), nullable=True), - sa.Column("gas_price", sa.INTEGER(), nullable=True), - sa.Column("input", sa.VARCHAR(), nullable=True), - sa.Column("receipt_cumulative_gas_used", sa.INTEGER(), nullable=True), - sa.Column("receipt_gas_used", sa.INTEGER(), nullable=True), - sa.Column("receipt_contract_address", sa.VARCHAR(), nullable=True), - sa.Column("receipt_root", sa.VARCHAR(), 
nullable=True), - sa.Column("receipt_status", sa.INTEGER(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("block_number", sa.INTEGER(), nullable=True), - sa.Column("block_hash", sa.VARCHAR(), nullable=True), - sa.Column("max_fee_per_gas", sa.INTEGER(), nullable=True), - sa.Column("max_priority_fee_per_gas", sa.INTEGER(), nullable=True), - sa.Column("transaction_type", sa.INTEGER(), nullable=True), - sa.Column("receipt_effective_gas_price", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("hash"), - ) - op.create_table( - "zkevm_batches", - sa.Column("batch_index", sa.INTEGER(), nullable=False), - sa.Column("coinbase", sa.VARCHAR(), nullable=True), - sa.Column("state_root", sa.VARCHAR(), nullable=True), - sa.Column("global_exit_root", sa.VARCHAR(), nullable=True), - sa.Column("mainnet_exit_root", sa.VARCHAR(), nullable=True), - sa.Column("rollup_exit_root", sa.VARCHAR(), nullable=True), - sa.Column("local_exit_root", sa.VARCHAR(), nullable=True), - sa.Column("acc_input_hash", sa.VARCHAR(), nullable=True), - sa.Column("timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("transactions", postgresql.ARRAY(sa.VARCHAR()), nullable=True), - sa.Column("blocks", postgresql.ARRAY(sa.INTEGER()), nullable=True), - sa.Column("start_block_number", sa.INTEGER(), nullable=True), - sa.Column("end_block_number", sa.INTEGER(), nullable=True), - sa.Column("block_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("sequence_batch_tx_hash", sa.VARCHAR(), nullable=True), - sa.Column("sequence_batch_block_number", sa.INTEGER(), nullable=True), - sa.Column("sequence_batch_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("verify_batch_tx_hash", sa.VARCHAR(), nullable=True), - sa.Column("verify_batch_block_number", sa.INTEGER(), nullable=True), - sa.Column("verify_batch_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - 
sa.Column("number", sa.INTEGER(), nullable=True), - sa.Column("send_sequences_tx_hash", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("batch_index"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("zkevm_batches") - op.drop_table("op_da_transactions") - op.drop_table("op_bedrock_state_batches") - op.drop_table("mantle_batches") - op.drop_table("linea_batches") - op.drop_table("l2_to_l1_bridge_transactions") - op.drop_table("l1_to_l2_bridge_transactions") - op.drop_table("l1_state_batches") - op.drop_table("data_stores") - op.drop_table("data_store_tx_mapping") - op.drop_table("bridge_tokens") - op.drop_table("arbitrum_transaction_batches") - op.drop_table("arbitrum_state_batches") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240802_add_uniswap_v2_table.py b/hemera/migrations/versions/20240802_add_uniswap_v2_table.py deleted file mode 100644 index aa6bab591..000000000 --- a/hemera/migrations/versions/20240802_add_uniswap_v2_table.py +++ /dev/null @@ -1,54 +0,0 @@ -"""add uniswap v2 table - -Revision ID: aa99dd347ef1 -Revises: e3a3e2114b9c -Create Date: 2024-08-02 16:17:43.105236 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "aa99dd347ef1" -down_revision: Union[str, None] = "e3a3e2114b9c" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "feature_uniswap_v2_pools", - sa.Column("factory_address", postgresql.BYTEA(), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("length", sa.NUMERIC(precision=100), nullable=True), - sa.Column("called_block_number", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("factory_address", "pool_address"), - ) - op.add_column("feature_uniswap_v3_pools", sa.Column("called_block_number", sa.BIGINT(), nullable=True)) - op.drop_column("feature_uniswap_v3_pools", "mint_block_number") - op.add_column("feature_uniswap_v3_tokens", sa.Column("called_block_number", sa.BIGINT(), nullable=True)) - op.drop_column("feature_uniswap_v3_tokens", "mint_block_number") - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column( - "feature_uniswap_v3_tokens", sa.Column("mint_block_number", sa.BIGINT(), autoincrement=False, nullable=True) - ) - op.drop_column("feature_uniswap_v3_tokens", "called_block_number") - op.add_column( - "feature_uniswap_v3_pools", sa.Column("mint_block_number", sa.BIGINT(), autoincrement=False, nullable=True) - ) - op.drop_column("feature_uniswap_v3_pools", "called_block_number") - op.drop_table("feature_uniswap_v2_pools") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240805_add_column_to_contracts_table.py b/hemera/migrations/versions/20240805_add_column_to_contracts_table.py deleted file mode 100644 index 554be922c..000000000 --- a/hemera/migrations/versions/20240805_add_column_to_contracts_table.py +++ /dev/null @@ -1,44 +0,0 @@ -"""add column to contracts table - -Revision ID: 832fa52da346 -Revises: aa99dd347ef1 -Create Date: 2024-08-05 19:33:42.496838 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "832fa52da346" -down_revision: Union[str, None] = "aa99dd347ef1" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.execute("CREATE EXTENSION IF NOT EXISTS pgcrypto;") - op.add_column( - "contracts", - sa.Column( - "deployed_code_hash", - sa.VARCHAR(), - sa.Computed( - "encode(digest('0x'||encode(deployed_code, 'hex'), 'sha256'), 'hex')", - ), - nullable=True, - ), - ) - op.add_column("contracts", sa.Column("transaction_from_address", postgresql.BYTEA(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column("contracts", "transaction_from_address") - op.drop_column("contracts", "deployed_code_hash") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240806_add_current_traits_activeness.py b/hemera/migrations/versions/20240806_add_current_traits_activeness.py deleted file mode 100644 index 2136c5e84..000000000 --- a/hemera/migrations/versions/20240806_add_current_traits_activeness.py +++ /dev/null @@ -1,39 +0,0 @@ -"""add current_traits_activeness - -Revision ID: b86e241b5e18 -Revises: 832fa52da346 -Create Date: 2024-08-06 14:03:13.234591 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "b86e241b5e18" -down_revision: Union[str, None] = "832fa52da346" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "current_traits_activeness", - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("value", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("current_traits_activeness") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240808_add_blue_chip_holding.py b/hemera/migrations/versions/20240808_add_blue_chip_holding.py deleted file mode 100644 index 94e539ce2..000000000 --- a/hemera/migrations/versions/20240808_add_blue_chip_holding.py +++ /dev/null @@ -1,41 +0,0 @@ -"""add blue chip holding - -Revision ID: 1b1c6a8b6c7b -Revises: b86e241b5e18 -Create Date: 2024-08-08 18:04:05.716199 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "1b1c6a8b6c7b" -down_revision: Union[str, None] = "b86e241b5e18" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "feature_blue_chip_holders", - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("hold_detail", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("current_count", sa.BIGINT(), nullable=True), - sa.Column("called_block_number", sa.BIGINT(), nullable=True), - sa.Column("called_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("wallet_address"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("feature_blue_chip_holders") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240813_add_daily_wallet_address_tables.py b/hemera/migrations/versions/20240813_add_daily_wallet_address_tables.py deleted file mode 100644 index 38bcc4e9b..000000000 --- a/hemera/migrations/versions/20240813_add_daily_wallet_address_tables.py +++ /dev/null @@ -1,194 +0,0 @@ -"""add daily wallet address tables - -Revision ID: bf51d23c852f -Revises: 1b1c6a8b6c7b -Create Date: 2024-08-13 13:20:17.153801 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "bf51d23c852f" -down_revision: Union[str, None] = "1b1c6a8b6c7b" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "daily_contract_interacted_aggregates", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("from_address", postgresql.BYTEA(), nullable=False), - sa.Column("to_address", postgresql.BYTEA(), nullable=False), - sa.Column("contract_interacted_cnt", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("block_date", "from_address", "to_address"), - ) - op.create_table( - "daily_wallet_addresses_aggregates", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_in_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_in_value", 
sa.NUMERIC(precision=78), nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column( - "internal_txn_cnt", - sa.INTEGER(), - sa.Computed( - "internal_txn_in_cnt + internal_txn_out_cnt", - ), - nullable=True, - ), - sa.Column( - "erc20_transfer_cnt", - sa.INTEGER(), - sa.Computed( - "erc20_transfer_in_cnt + erc20_transfer_out_cnt", - ), - nullable=True, - ), - sa.Column( - "erc721_transfer_cnt", - sa.INTEGER(), - sa.Computed( - "erc721_transfer_in_cnt + erc721_transfer_out_cnt", - ), - nullable=True, - ), - sa.Column( - "erc1155_transfer_cnt", - sa.INTEGER(), - sa.Computed( - "erc1155_transfer_in_cnt + erc1155_transfer_out_cnt", - ), - nullable=True, - ), - sa.Column("txn_self_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_in_error_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_out_error_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_self_error_cnt", sa.INTEGER(), nullable=True), - sa.Column( - "txn_cnt", - sa.INTEGER(), - sa.Computed( - "((txn_in_cnt + txn_out_cnt) - txn_self_cnt)", - ), - nullable=True, - ), - sa.Column("deposit_cnt", sa.INTEGER(), nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), nullable=True), - sa.Column("gas_in_used", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l2_txn_in_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l1_txn_in_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_in_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("gas_out_used", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l2_txn_out_fee", 
sa.NUMERIC(precision=78), nullable=True), - sa.Column("l1_txn_out_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_out_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("contract_deployed_cnt", sa.INTEGER(), nullable=True), - sa.Column("from_address_unique_interacted_cnt", sa.INTEGER(), nullable=True), - sa.Column("to_address_unique_interacted_cnt", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("address", "block_date"), - ) - op.create_table( - "period_wallet_addresses_aggregates", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("period_date", sa.DATE(), nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("txn_in_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78), nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), nullable=True), - sa.Column( - "internal_txn_cnt", - sa.INTEGER(), - sa.Computed( - "internal_txn_in_cnt + internal_txn_out_cnt", - ), - nullable=True, - ), - sa.Column( - "erc20_transfer_cnt", - sa.INTEGER(), - sa.Computed( - "erc20_transfer_in_cnt + erc20_transfer_out_cnt", - ), - nullable=True, - ), - sa.Column( - "erc721_transfer_cnt", - sa.INTEGER(), - sa.Computed( - "erc721_transfer_in_cnt + 
erc721_transfer_out_cnt", - ), - nullable=True, - ), - sa.Column( - "erc1155_transfer_cnt", - sa.INTEGER(), - sa.Computed( - "erc1155_transfer_in_cnt + erc1155_transfer_out_cnt", - ), - nullable=True, - ), - sa.Column("txn_self_cnt", sa.INTEGER(), nullable=False), - sa.Column("txn_in_error_cnt", sa.INTEGER(), nullable=False), - sa.Column("txn_out_error_cnt", sa.INTEGER(), nullable=False), - sa.Column("txn_self_error_cnt", sa.INTEGER(), nullable=False), - sa.Column( - "txn_cnt", - sa.INTEGER(), - sa.Computed( - "((txn_in_cnt + txn_out_cnt) - txn_self_cnt)", - ), - nullable=True, - ), - sa.Column("deposit_cnt", sa.INTEGER(), nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), nullable=True), - sa.Column("gas_in_used", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l2_txn_in_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l1_txn_in_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_in_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("gas_out_used", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l2_txn_out_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("l1_txn_out_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("txn_out_fee", sa.NUMERIC(precision=78), nullable=True), - sa.Column("contract_deployed_cnt", sa.INTEGER(), nullable=True), - sa.Column("from_address_unique_interacted_cnt", sa.INTEGER(), nullable=True), - sa.Column("to_address_unique_interacted_cnt", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("address", "period_date"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("period_wallet_addresses_aggregates") - op.drop_table("daily_wallet_addresses_aggregates") - op.drop_table("daily_contract_interacted_aggregates") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240827_add_token_price_table.py b/hemera/migrations/versions/20240827_add_token_price_table.py deleted file mode 100644 index 3159b079a..000000000 --- a/hemera/migrations/versions/20240827_add_token_price_table.py +++ /dev/null @@ -1,46 +0,0 @@ -"""add token price table - -Revision ID: 2359a28d63cb -Revises: bf51d23c852f -Create Date: 2024-08-27 17:58:50.838313 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. -revision: str = "2359a28d63cb" -down_revision: Union[str, None] = "bf51d23c852f" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "token_hourly_prices", - sa.Column("symbol", sa.String(), nullable=False), - sa.Column("timestamp", sa.DateTime(), nullable=False), - sa.Column("price", sa.Numeric(), nullable=True), - sa.PrimaryKeyConstraint("symbol", "timestamp"), - if_not_exists=True, - ) - - op.create_table( - "token_prices", - sa.Column("symbol", sa.String(), nullable=False), - sa.Column("timestamp", sa.DateTime(), nullable=False), - sa.Column("price", sa.Numeric(), nullable=True), - sa.PrimaryKeyConstraint("symbol", "timestamp"), - if_not_exists=True, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - pass - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240830_add_address_token_deposit_table.py b/hemera/migrations/versions/20240830_add_address_token_deposit_table.py deleted file mode 100644 index 2f1183191..000000000 --- a/hemera/migrations/versions/20240830_add_address_token_deposit_table.py +++ /dev/null @@ -1,91 +0,0 @@ -"""add address_token_deposit table - -Revision ID: 6c2eecd6316b -Revises: 2359a28d63cb -Create Date: 2024-08-30 15:09:43.357835 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "6c2eecd6316b" -down_revision: Union[str, None] = "2359a28d63cb" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "af_token_deposits__transactions", - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=True), - sa.Column("chain_id", sa.BIGINT(), nullable=True), - sa.Column("contract_address", postgresql.BYTEA(), nullable=True), - sa.Column("token_address", postgresql.BYTEA(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("transaction_hash"), - ) - op.create_index( - "af_deposits_transactions_block_number_index", - "af_token_deposits__transactions", - [sa.text("block_number DESC")], - 
unique=False, - ) - op.create_index( - "af_deposits_transactions_chain_id_index", "af_token_deposits__transactions", ["chain_id"], unique=False - ) - op.create_index( - "af_deposits_transactions_contract_address_index", - "af_token_deposits__transactions", - ["contract_address"], - unique=False, - ) - op.create_index( - "af_deposits_transactions_token_address_index", - "af_token_deposits__transactions", - ["token_address"], - unique=False, - ) - op.create_index( - "af_deposits_transactions_wallet_address_index", - "af_token_deposits__transactions", - ["wallet_address"], - unique=False, - ) - op.create_table( - "af_token_deposits_current", - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("chain_id", sa.BIGINT(), nullable=False), - sa.Column("contract_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("wallet_address", "token_address", "contract_address", "chain_id"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index("af_deposits_transactions_wallet_address_index", table_name="af_token_deposits__transactions") - op.drop_index("af_deposits_transactions_token_address_index", table_name="af_token_deposits__transactions") - op.drop_index("af_deposits_transactions_contract_address_index", table_name="af_token_deposits__transactions") - op.drop_index("af_deposits_transactions_chain_id_index", table_name="af_token_deposits__transactions") - op.drop_index("af_deposits_transactions_block_number_index", table_name="af_token_deposits__transactions") - op.drop_table("af_token_deposits_current") - op.drop_table("af_token_deposits__transactions") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240831_add_ens.py b/hemera/migrations/versions/20240831_add_ens.py deleted file mode 100644 index 49846dd4a..000000000 --- a/hemera/migrations/versions/20240831_add_ens.py +++ /dev/null @@ -1,99 +0,0 @@ -"""ens - -Revision ID: 43d14640a8ac -Revises: 6c2eecd6316b -Create Date: 2024-09-05 11:08:15.501786 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "43d14640a8ac" -down_revision: Union[str, None] = "6c2eecd6316b" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "af_ens_address_current", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("name", sa.VARCHAR(), nullable=True), - sa.Column("reverse_node", postgresql.BYTEA(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address"), - ) - op.create_table( - "af_ens_event", - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("method", sa.VARCHAR(), nullable=True), - sa.Column("event_name", sa.VARCHAR(), nullable=True), - sa.Column("topic0", sa.VARCHAR(), nullable=True), - sa.Column("from_address", postgresql.BYTEA(), nullable=True), - sa.Column("to_address", postgresql.BYTEA(), nullable=True), - sa.Column("base_node", postgresql.BYTEA(), nullable=True), - sa.Column("node", postgresql.BYTEA(), nullable=True), - sa.Column("label", postgresql.BYTEA(), nullable=True), - sa.Column("name", sa.VARCHAR(), nullable=True), - sa.Column("expires", postgresql.TIMESTAMP(), nullable=True), - sa.Column("owner", postgresql.BYTEA(), nullable=True), - sa.Column("resolver", postgresql.BYTEA(), nullable=True), - sa.Column("registrant", postgresql.BYTEA(), nullable=True), - sa.Column("address", postgresql.BYTEA(), nullable=True), - sa.Column("reverse_base_node", postgresql.BYTEA(), nullable=True), - sa.Column("reverse_node", postgresql.BYTEA(), nullable=True), - sa.Column("reverse_label", postgresql.BYTEA(), nullable=True), - sa.Column("reverse_name", sa.VARCHAR(), 
nullable=True), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=True), - sa.Column("w_token_id", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), server_default=sa.text("false"), nullable=True), - sa.PrimaryKeyConstraint("transaction_hash", "log_index", name="ens_tnx_log_index"), - ) - op.create_index("ens_event_address", "af_ens_event", ["from_address"], unique=False) - op.create_index( - "ens_idx_block_number_log_index", "af_ens_event", ["block_number", sa.text("log_index DESC")], unique=False - ) - op.create_table( - "af_ens_node_current", - sa.Column("node", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=True), - sa.Column("w_token_id", sa.NUMERIC(precision=100), nullable=True), - sa.Column("first_owned_by", postgresql.BYTEA(), nullable=True), - sa.Column("name", sa.String(), nullable=True), - sa.Column("registration", postgresql.TIMESTAMP(), nullable=True), - sa.Column("expires", postgresql.TIMESTAMP(), nullable=True), - sa.Column("address", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("node"), - ) - op.create_index("ens_idx_address", "af_ens_node_current", ["address"], unique=False) - op.create_index("ens_idx_name_md5", "af_ens_node_current", [sa.text("md5(name)")], unique=False) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index("ens_idx_name_md5", table_name="af_ens_node_current") - op.drop_index("ens_idx_address", table_name="af_ens_node_current") - op.drop_table("af_ens_node_current") - op.drop_index("ens_idx_block_number_log_index", table_name="af_ens_event") - op.drop_index("ens_event_address", table_name="af_ens_event") - op.drop_table("af_ens_event") - op.drop_table("af_ens_address_current") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240906_add_uniswap_v3_enhance_table.py b/hemera/migrations/versions/20240906_add_uniswap_v3_enhance_table.py deleted file mode 100644 index 0908bbef5..000000000 --- a/hemera/migrations/versions/20240906_add_uniswap_v3_enhance_table.py +++ /dev/null @@ -1,323 +0,0 @@ -"""add uniswap v3 enhance table - -Revision ID: f4efa18760cc -Revises: 43d14640a8ac -Create Date: 2024-09-06 13:24:28.201489 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "f4efa18760cc" -down_revision: Union[str, None] = "43d14640a8ac" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - - op.create_table( - "af_uniswap_v3_pool_prices_current", - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("factory_address", postgresql.BYTEA(), nullable=True), - sa.Column("sqrt_price_x96", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("pool_address"), - ) - op.create_table( - "af_uniswap_v3_pool_prices_hist", - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("sqrt_price_x96", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick", sa.NUMERIC(precision=100), nullable=True), - sa.Column("factory_address", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("pool_address", "block_timestamp", "block_number"), - ) - op.create_table( - "af_uniswap_v3_pool_swap_hist", - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("position_token_address", postgresql.BYTEA(), nullable=True), - sa.Column("transaction_from_address", postgresql.BYTEA(), 
nullable=True), - sa.Column("sender", postgresql.BYTEA(), nullable=True), - sa.Column("recipient", postgresql.BYTEA(), nullable=True), - sa.Column("liquidity", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick", sa.NUMERIC(precision=100), nullable=True), - sa.Column("sqrt_price_x96", sa.NUMERIC(precision=100), nullable=True), - sa.Column("amount0", sa.NUMERIC(precision=100), nullable=True), - sa.Column("amount1", sa.NUMERIC(precision=100), nullable=True), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("pool_address", "transaction_hash", "log_index"), - ) - op.create_table( - "af_uniswap_v3_pools", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("factory_address", postgresql.BYTEA(), nullable=True), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("fee", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick_spacing", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "pool_address"), - ) - op.create_table( - "af_uniswap_v3_token_collect_fee_hist", - sa.Column("position_token_address", postgresql.BYTEA(), 
nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("owner", postgresql.BYTEA(), nullable=True), - sa.Column("recipient", postgresql.BYTEA(), nullable=True), - sa.Column("amount0", sa.NUMERIC(precision=100), nullable=True), - sa.Column("amount1", sa.NUMERIC(precision=100), nullable=True), - sa.Column("pool_address", postgresql.BYTEA(), nullable=True), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "block_timestamp", "block_number", "log_index"), - ) - op.create_index( - "af_uniswap_v3_token_collect_fee_hist_owner_index", - "af_uniswap_v3_token_collect_fee_hist", - ["owner"], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_collect_fee_hist_pool_index", - "af_uniswap_v3_token_collect_fee_hist", - ["pool_address"], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_collect_fee_hist_token0_index", - "af_uniswap_v3_token_collect_fee_hist", - ["token0_address"], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_collect_fee_hist_token1_index", - "af_uniswap_v3_token_collect_fee_hist", - ["token1_address"], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_collect_fee_hist_token_id_index", - "af_uniswap_v3_token_collect_fee_hist", - ["token_id"], - unique=False, - ) - op.create_table( - 
"af_uniswap_v3_token_data_current", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=True), - sa.Column("pool_address", postgresql.BYTEA(), nullable=True), - sa.Column("liquidity", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id"), - ) - op.create_index( - "af_uniswap_v3_token_data_current_wallet_desc_index", - "af_uniswap_v3_token_data_current", - [sa.text("wallet_address DESC")], - unique=False, - ) - op.create_table( - "af_uniswap_v3_token_data_hist", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=True), - sa.Column("pool_address", postgresql.BYTEA(), nullable=True), - sa.Column("liquidity", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "block_timestamp", "block_number"), - ) - op.create_index( - "af_uniswap_v3_token_data_hist_token_block_desc_index", - "af_uniswap_v3_token_data_hist", - [sa.text("position_token_address DESC"), 
sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_data_hist_wallet_token_block_desc_index", - "af_uniswap_v3_token_data_hist", - [sa.text("wallet_address DESC"), sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "af_uniswap_v3_token_liquidity_hist", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("owner", postgresql.BYTEA(), nullable=True), - sa.Column("liquidity", sa.NUMERIC(precision=100), nullable=True), - sa.Column("amount0", sa.NUMERIC(precision=100), nullable=True), - sa.Column("amount1", sa.NUMERIC(precision=100), nullable=True), - sa.Column("pool_address", postgresql.BYTEA(), nullable=True), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("action_type", sa.VARCHAR(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True, server_default=sa.text("false")), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "block_timestamp", "block_number", "log_index"), - ) - op.create_index( - "af_uniswap_v3_token_liquidity_hist_owner_index", "af_uniswap_v3_token_liquidity_hist", ["owner"], unique=False - ) - op.create_index( - "af_uniswap_v3_token_liquidity_hist_pool_index", - "af_uniswap_v3_token_liquidity_hist", - ["pool_address"], - unique=False, - ) - op.create_index( - 
"af_uniswap_v3_token_liquidity_hist_token0_index", - "af_uniswap_v3_token_liquidity_hist", - ["token0_address"], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_liquidity_hist_token1_index", - "af_uniswap_v3_token_liquidity_hist", - ["token1_address"], - unique=False, - ) - op.create_index( - "af_uniswap_v3_token_liquidity_hist_token_id_index", - "af_uniswap_v3_token_liquidity_hist", - ["token_id"], - unique=False, - ) - op.create_table( - "af_uniswap_v3_tokens", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=True), - sa.Column("tick_lower", sa.NUMERIC(precision=100), nullable=True), - sa.Column("tick_upper", sa.NUMERIC(precision=100), nullable=True), - sa.Column("fee", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id"), - ) - op.create_index("af_uniswap_v3_tokens_nft_index", "af_uniswap_v3_tokens", ["position_token_address"], unique=False) - op.drop_index("feature_uniswap_v3_tokens_nft_index", table_name="feature_uniswap_v3_tokens") - op.drop_table("feature_uniswap_v3_tokens") - op.drop_table("feature_uniswap_v3_pools") - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "feature_uniswap_v3_pools", - sa.Column("position_token_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("token0_address", postgresql.BYTEA(), autoincrement=False, nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), autoincrement=False, nullable=True), - sa.Column("fee", sa.NUMERIC(precision=100, scale=0), autoincrement=False, nullable=True), - sa.Column("tick_spacing", sa.NUMERIC(precision=100, scale=0), autoincrement=False, nullable=True), - sa.Column( - "create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), autoincrement=False, nullable=True - ), - sa.Column( - "update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), autoincrement=False, nullable=True - ), - sa.Column("called_block_number", sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "pool_address", name="feature_uniswap_v3_pools_pkey"), - ) - op.create_table( - "feature_uniswap_v3_tokens", - sa.Column("position_token_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100, scale=0), autoincrement=False, nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), autoincrement=False, nullable=True), - sa.Column("tick_lower", sa.NUMERIC(precision=100, scale=0), autoincrement=False, nullable=True), - sa.Column("tick_upper", sa.NUMERIC(precision=100, scale=0), autoincrement=False, nullable=True), - sa.Column("fee", sa.NUMERIC(precision=100, scale=0), autoincrement=False, nullable=True), - sa.Column( - "create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), autoincrement=False, nullable=True - ), - sa.Column( - "update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), autoincrement=False, nullable=True - ), - sa.Column("called_block_number", sa.BIGINT(), autoincrement=False, 
nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id", name="feature_uniswap_v3_tokens_pkey"), - ) - op.create_index( - "feature_uniswap_v3_tokens_nft_index", "feature_uniswap_v3_tokens", ["position_token_address"], unique=False - ) - - op.drop_index("af_uniswap_v3_tokens_nft_index", table_name="af_uniswap_v3_tokens") - op.drop_table("af_uniswap_v3_tokens") - op.drop_index("af_uniswap_v3_token_liquidity_hist_token_id_index", table_name="af_uniswap_v3_token_liquidity_hist") - op.drop_index("af_uniswap_v3_token_liquidity_hist_token1_index", table_name="af_uniswap_v3_token_liquidity_hist") - op.drop_index("af_uniswap_v3_token_liquidity_hist_token0_index", table_name="af_uniswap_v3_token_liquidity_hist") - op.drop_index("af_uniswap_v3_token_liquidity_hist_pool_index", table_name="af_uniswap_v3_token_liquidity_hist") - op.drop_index("af_uniswap_v3_token_liquidity_hist_owner_index", table_name="af_uniswap_v3_token_liquidity_hist") - op.drop_table("af_uniswap_v3_token_liquidity_hist") - op.drop_index( - "af_uniswap_v3_token_data_hist_wallet_token_block_desc_index", table_name="af_uniswap_v3_token_data_hist" - ) - op.drop_index("af_uniswap_v3_token_data_hist_token_block_desc_index", table_name="af_uniswap_v3_token_data_hist") - op.drop_table("af_uniswap_v3_token_data_hist") - op.drop_index("af_uniswap_v3_token_data_current_wallet_desc_index", table_name="af_uniswap_v3_token_data_current") - op.drop_table("af_uniswap_v3_token_data_current") - op.drop_index( - "af_uniswap_v3_token_collect_fee_hist_token_id_index", table_name="af_uniswap_v3_token_collect_fee_hist" - ) - op.drop_index( - "af_uniswap_v3_token_collect_fee_hist_token1_index", table_name="af_uniswap_v3_token_collect_fee_hist" - ) - op.drop_index( - "af_uniswap_v3_token_collect_fee_hist_token0_index", table_name="af_uniswap_v3_token_collect_fee_hist" - ) - op.drop_index("af_uniswap_v3_token_collect_fee_hist_pool_index", table_name="af_uniswap_v3_token_collect_fee_hist") - 
op.drop_index("af_uniswap_v3_token_collect_fee_hist_owner_index", table_name="af_uniswap_v3_token_collect_fee_hist") - op.drop_table("af_uniswap_v3_token_collect_fee_hist") - op.drop_table("af_uniswap_v3_pools") - op.drop_table("af_uniswap_v3_pool_swap_hist") - op.drop_table("af_uniswap_v3_pool_prices_hist") - op.drop_table("af_uniswap_v3_pool_prices_current") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240910_add_address_index.py b/hemera/migrations/versions/20240910_add_address_index.py deleted file mode 100644 index 74f917e48..000000000 --- a/hemera/migrations/versions/20240910_add_address_index.py +++ /dev/null @@ -1,170 +0,0 @@ -"""add address index - -Revision ID: e8f78802f27a -Revises: f4efa18760cc -Create Date: 2024-09-10 16:27:52.477748 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "e8f78802f27a" -down_revision: Union[str, None] = "f4efa18760cc" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "address_nft_1155_holders", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("balance_of", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "token_address", "token_id"), - if_not_exists=True, - ) - op.create_index( - "address_nft_1155_holders_token_address_balance_of_idx", - "address_nft_1155_holders", - ["token_address", "token_id", sa.text("balance_of DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "address_nft_transfers", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=False), - sa.Column("token_address", postgresql.BYTEA(), nullable=True), - sa.Column("related_address", postgresql.BYTEA(), nullable=True), - sa.Column("transfer_type", sa.SMALLINT(), nullable=True), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "block_number", "log_index", "block_timestamp", "block_hash", "token_id"), - if_not_exists=True, - ) - op.create_table( - "address_token_holders", - sa.Column("address", postgresql.BYTEA(), 
nullable=False), - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance_of", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "token_address"), - if_not_exists=True, - ) - op.create_index( - "address_token_holders_token_address_balance_of_idx", - "address_token_holders", - ["token_address", sa.text("balance_of DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "address_token_transfers", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=False), - sa.Column("token_address", postgresql.BYTEA(), nullable=True), - sa.Column("related_address", postgresql.BYTEA(), nullable=True), - sa.Column("transfer_type", sa.SMALLINT(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "block_number", "log_index", "block_timestamp", "block_hash"), - if_not_exists=True, - ) - op.create_table( - "address_transactions", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), 
nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("txn_type", sa.SMALLINT(), nullable=True), - sa.Column("related_address", postgresql.BYTEA(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("transaction_fee", sa.NUMERIC(precision=100), nullable=True), - sa.Column("receipt_status", sa.INTEGER(), nullable=True), - sa.Column("method", sa.TEXT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "block_number", "transaction_index", "block_timestamp"), - if_not_exists=True, - ) - op.create_index( - "address_transactions_address_block_timestamp_block_number_t_idx", - "address_transactions", - ["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - if_not_exists=True, - ) - op.create_index( - "address_transactions_address_txn_type_block_timestamp_block_idx", - "address_transactions", - [ - "address", - "txn_type", - sa.text("block_timestamp DESC"), - sa.text("block_number DESC"), - sa.text("transaction_index DESC"), - ], - unique=False, - if_not_exists=True, - ) - op.create_table( - "token_address_nft_inventories", - sa.Column("token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("token_address", "token_id"), - if_not_exists=True, - ) - op.create_index( - "token_address_nft_inventories_wallet_address_token_address__idx", - "token_address_nft_inventories", - 
["wallet_address", "token_address", "token_id"], - unique=False, - if_not_exists=True, - ) - - -# ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index( - "token_address_nft_inventories_wallet_address_token_address__idx", table_name="token_address_nft_inventories" - ) - op.drop_table("token_address_nft_inventories") - op.drop_index("address_transactions_address_txn_type_block_timestamp_block_idx", table_name="address_transactions") - op.drop_index("address_transactions_address_block_timestamp_block_number_t_idx", table_name="address_transactions") - op.drop_table("address_transactions") - op.drop_table("address_token_transfers") - op.drop_index("address_token_holders_token_address_balance_of_idx", table_name="address_token_holders") - op.drop_table("address_token_holders") - op.drop_table("address_nft_transfers") - op.drop_index("address_nft_1155_holders_token_address_balance_of_idx", table_name="address_nft_1155_holders") - op.drop_table("address_nft_1155_holders") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240911_add_opensea.py b/hemera/migrations/versions/20240911_add_opensea.py deleted file mode 100644 index c9d8211ae..000000000 --- a/hemera/migrations/versions/20240911_add_opensea.py +++ /dev/null @@ -1,188 +0,0 @@ -"""add opensea - -Revision ID: 3dd9b90d2e31 -Revises: e8f78802f27a -Create Date: 2024-09-11 11:30:31.566920 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "3dd9b90d2e31" -down_revision: Union[str, None] = "e8f78802f27a" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "af_opensea__transactions", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("is_offer", sa.BOOLEAN(), nullable=False), - sa.Column("related_address", postgresql.BYTEA(), nullable=True), - sa.Column("transaction_type", sa.SMALLINT(), nullable=True), - sa.Column("order_hash", postgresql.BYTEA(), nullable=True), - sa.Column("zone", postgresql.BYTEA(), nullable=True), - sa.Column("offer", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("consideration", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("fee", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("log_index", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=False), - sa.Column("reorg", sa.BOOLEAN(), server_default=sa.text("false"), nullable=True), - sa.Column("protocol_version", sa.VARCHAR(), server_default="1.6", nullable=True), - sa.PrimaryKeyConstraint("address", "is_offer", "block_number", "log_index", "block_hash"), - if_not_exists=True, - ) - op.create_index( - "af_opensea__transactions_address_block_number_log_index_blo_idx", - "af_opensea__transactions", - ["address", sa.text("block_number DESC"), sa.text("log_index DESC"), sa.text("block_timestamp DESC")], - unique=False, - if_not_exists=True, - ) - op.create_index( - "af_opensea__transactions_address_block_timestamp_idx", - "af_opensea__transactions", - ["address", sa.text("block_timestamp DESC")], - unique=False, - if_not_exists=True, - ) - op.create_index( - "af_opensea__transactions_block_timestamp_idx", - 
"af_opensea__transactions", - [sa.text("block_timestamp DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "af_opensea_daily_transactions", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("buy_txn_count", sa.INTEGER(), nullable=True), - sa.Column("sell_txn_count", sa.INTEGER(), nullable=True), - sa.Column("swap_txn_count", sa.INTEGER(), nullable=True), - sa.Column("buy_opensea_order_count", sa.INTEGER(), nullable=True), - sa.Column("sell_opensea_order_count", sa.INTEGER(), nullable=True), - sa.Column("swap_opensea_order_count", sa.INTEGER(), nullable=True), - sa.Column("buy_nft_stats", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("sell_nft_stats", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("buy_volume_crypto", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("sell_volume_crypto", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("buy_volume_usd", sa.NUMERIC(), nullable=True), - sa.Column("sell_volume_usd", sa.NUMERIC(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "block_date"), - if_not_exists=True, - ) - op.create_table( - "af_opensea_na_crypto_token_mapping", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("address_var", sa.VARCHAR(length=42), nullable=True), - sa.Column("price_symbol", sa.VARCHAR(), nullable=True), - sa.Column("decimals", sa.INTEGER(), server_default=sa.text("18"), nullable=False), - sa.PrimaryKeyConstraint("id"), - if_not_exists=True, - ) - op.create_table( - "af_opensea_na_orders", - sa.Column("order_hash", postgresql.BYTEA(), nullable=True), - sa.Column("zone", postgresql.BYTEA(), nullable=True), - sa.Column("offerer", 
postgresql.BYTEA(), nullable=True), - sa.Column("recipient", postgresql.BYTEA(), nullable=True), - sa.Column("offer", postgresql.JSON(astext_type=sa.Text()), nullable=True), - sa.Column("consideration", postgresql.JSON(astext_type=sa.Text()), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("log_index", sa.INTEGER(), nullable=False), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column("block_hash", postgresql.BYTEA(), nullable=False), - sa.Column("reorg", sa.BOOLEAN(), server_default=sa.text("false"), nullable=True), - sa.Column("protocol_version", sa.VARCHAR(), server_default="1.6", nullable=True), - sa.PrimaryKeyConstraint("block_number", "log_index", "block_hash"), - if_not_exists=True, - ) - op.create_index( - "idx_order_hash", - "af_opensea_na_orders", - ["order_hash"], - unique=False, - if_not_exists=True, - ) - op.create_table( - "af_opensea_na_scheduled_metadata", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("id"), - if_not_exists=True, - ) - op.create_table( - "af_opensea_profile", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("buy_txn_count", sa.INTEGER(), server_default=sa.text("0"), nullable=True), - sa.Column("sell_txn_count", sa.INTEGER(), server_default=sa.text("0"), nullable=True), - sa.Column("swap_txn_count", sa.INTEGER(), server_default=sa.text("0"), nullable=True), - sa.Column("buy_opensea_order_count", sa.INTEGER(), server_default=sa.text("0"), nullable=True), - 
sa.Column("sell_opensea_order_count", sa.INTEGER(), server_default=sa.text("0"), nullable=True), - sa.Column("swap_opensea_order_count", sa.INTEGER(), server_default=sa.text("0"), nullable=True), - sa.Column("buy_nft_stats", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("sell_nft_stats", postgresql.JSONB(astext_type=sa.Text()), nullable=True), - sa.Column("buy_volume_usd", sa.NUMERIC(), nullable=True), - sa.Column("sell_volume_usd", sa.NUMERIC(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("first_transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("first_block_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.Column( - "txn_count", - sa.INTEGER(), - sa.Computed( - "(buy_txn_count + sell_txn_count) + swap_txn_count", - ), - nullable=True, - ), - sa.Column( - "opensea_order_count", - sa.INTEGER(), - sa.Computed( - "(buy_opensea_order_count + sell_opensea_order_count) + swap_opensea_order_count", - ), - nullable=True, - ), - sa.Column("volume_usd", sa.NUMERIC(), server_default=sa.text("0"), nullable=True), - sa.PrimaryKeyConstraint("address"), - if_not_exists=True, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("af_opensea_profile") - op.drop_table("af_opensea_na_scheduled_metadata") - op.drop_index("idx_order_hash", table_name="af_opensea_na_orders") - op.drop_table("af_opensea_na_orders") - op.drop_table("af_opensea_na_crypto_token_mapping") - op.drop_table("af_opensea_daily_transactions") - op.drop_index("af_opensea__transactions_block_timestamp_idx", table_name="af_opensea__transactions") - op.drop_index("af_opensea__transactions_address_block_timestamp_idx", table_name="af_opensea__transactions") - op.drop_index( - "af_opensea__transactions_address_block_number_log_index_blo_idx", table_name="af_opensea__transactions" - ) - op.drop_table("af_opensea__transactions") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py b/hemera/migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py deleted file mode 100644 index 086263e0d..000000000 --- a/hemera/migrations/versions/20240912_add_merchant_and_uniswap_daily_table.py +++ /dev/null @@ -1,242 +0,0 @@ -"""add merchant-moe and uniswap daily table - -Revision ID: c609922eae7a -Revises: 3dd9b90d2e31 -Create Date: 2024-09-12 14:44:05.528650 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "c609922eae7a" -down_revision: Union[str, None] = "3dd9b90d2e31" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "af_merchant_moe_pools", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("token0_address", postgresql.BYTEA(), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), server_default=sa.text("false"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address"), - ) - op.create_table( - "af_merchant_moe_token_bin_current", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("reserve0_bin", sa.NUMERIC(precision=100), nullable=True), - sa.Column("reserve1_bin", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id"), - ) - op.create_index( - "af_merchant_moe_token_bin_current_token_id_index", - "af_merchant_moe_token_bin_current", - [sa.text("position_token_address DESC"), sa.text("token_id ASC")], - unique=False, - ) - op.create_table( - "af_merchant_moe_token_bin_hist", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("reserve0_bin", 
sa.NUMERIC(precision=100), nullable=True), - sa.Column("reserve1_bin", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), server_default=sa.text("false"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "block_timestamp", "block_number"), - ) - op.create_index( - "af_merchant_moe_token_bin_hist_token_block_desc_index", - "af_merchant_moe_token_bin_hist", - [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "af_merchant_moe_token_supply_current", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("total_supply", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id"), - ) - op.create_table( - "af_merchant_moe_token_supply_hist", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("total_supply", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), 
server_default=sa.text("false"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "block_timestamp", "block_number"), - ) - op.create_index( - "af_merchant_moe_token_supply_hist_token_block_desc_index", - "af_merchant_moe_token_supply_hist", - [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_table( - "af_holding_balance_merchantmoe_period", - sa.Column("period_date", sa.DATE(), nullable=False), - sa.Column("protocol_id", sa.VARCHAR(), nullable=False), - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("token0_address", postgresql.BYTEA(), nullable=False), - sa.Column("token0_symbol", sa.VARCHAR(), nullable=False), - sa.Column("token0_balance", sa.NUMERIC(precision=100, scale=18), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=False), - sa.Column("token1_symbol", sa.VARCHAR(), nullable=False), - sa.Column("token1_balance", sa.NUMERIC(precision=100, scale=18), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("period_date", "protocol_id", "position_token_address", "token_id", "wallet_address"), - ) - op.create_index( - "af_holding_balance_merchantmoe_period_period_date", - "af_holding_balance_merchantmoe_period", - ["period_date"], - unique=False, - ) - op.create_table( - "af_holding_balance_uniswap_v3_period", - sa.Column("period_date", sa.DATE(), nullable=False), - sa.Column("protocol_id", sa.VARCHAR(), nullable=False), - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.INTEGER(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("token0_address", postgresql.BYTEA(), nullable=False), - 
sa.Column("token0_symbol", sa.VARCHAR(), nullable=False), - sa.Column("token0_balance", sa.NUMERIC(precision=100, scale=18), nullable=True), - sa.Column("token1_address", postgresql.BYTEA(), nullable=False), - sa.Column("token1_symbol", sa.VARCHAR(), nullable=False), - sa.Column("token1_balance", sa.NUMERIC(precision=100, scale=18), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("period_date", "protocol_id", "position_token_address", "token_id"), - ) - op.create_index( - "af_holding_balance_uniswap_v3_period_period_date", - "af_holding_balance_uniswap_v3_period", - ["period_date"], - unique=False, - ) - op.create_table( - "af_merchant_moe_token_bin_hist_period", - sa.Column("period_date", sa.DATE(), nullable=False), - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("reserve0_bin", sa.NUMERIC(precision=100), nullable=True), - sa.Column("reserve1_bin", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("period_date", "position_token_address", "token_id"), - ) - op.create_table( - "af_uniswap_v3_pool_prices_daily", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("sqrt_price_x96", sa.NUMERIC(precision=78), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("block_date", "pool_address"), - ) - op.create_index( - "af_uniswap_v3_pool_prices_daily_block_date_index", - "af_uniswap_v3_pool_prices_daily", - ["block_date"], - unique=False, - ) - op.create_table( - "af_uniswap_v3_pool_prices_period", - sa.Column("period_date", sa.DATE(), nullable=False), - sa.Column("pool_address", 
postgresql.BYTEA(), nullable=False), - sa.Column("sqrt_price_x96", sa.NUMERIC(precision=78), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("period_date", "pool_address"), - ) - op.create_index( - "af_uniswap_v3_pool_prices_period_period_date_index", - "af_uniswap_v3_pool_prices_period", - ["period_date"], - unique=False, - ) - op.create_table( - "af_uniswap_v3_token_data_daily", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.INTEGER(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("liquidity", sa.NUMERIC(precision=78), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("block_date", "position_token_address", "token_id"), - ) - op.create_index( - "af_uniswap_v3_token_data_daily_index", "af_uniswap_v3_token_data_daily", ["block_date"], unique=False - ) - op.create_table( - "af_uniswap_v3_token_data_period", - sa.Column("period_date", sa.DATE(), nullable=False), - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.INTEGER(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("liquidity", sa.NUMERIC(precision=78), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("period_date", "position_token_address", "token_id"), - ) - op.create_index( - "af_uniswap_v3_token_data_period_date_index", "af_uniswap_v3_token_data_period", ["period_date"], unique=False - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # 
### commands auto generated by Alembic - please adjust! ### - op.drop_index("af_uniswap_v3_token_data_period_date_index", table_name="af_uniswap_v3_token_data_period") - op.drop_table("af_uniswap_v3_token_data_period") - op.drop_index("af_uniswap_v3_token_data_daily_index", table_name="af_uniswap_v3_token_data_daily") - op.drop_table("af_uniswap_v3_token_data_daily") - op.drop_index("af_uniswap_v3_pool_prices_period_period_date_index", table_name="af_uniswap_v3_pool_prices_period") - op.drop_table("af_uniswap_v3_pool_prices_period") - op.drop_index("af_uniswap_v3_pool_prices_daily_block_date_index", table_name="af_uniswap_v3_pool_prices_daily") - op.drop_table("af_uniswap_v3_pool_prices_daily") - op.drop_table("af_merchant_moe_token_bin_hist_period") - op.drop_index("af_holding_balance_uniswap_v3_period_period_date", table_name="af_holding_balance_uniswap_v3_period") - op.drop_table("af_holding_balance_uniswap_v3_period") - op.drop_index( - "af_holding_balance_merchantmoe_period_period_date", table_name="af_holding_balance_merchantmoe_period" - ) - op.drop_table("af_holding_balance_merchantmoe_period") - op.drop_index( - "af_merchant_moe_token_supply_hist_token_block_desc_index", table_name="af_merchant_moe_token_supply_hist" - ) - op.drop_table("af_merchant_moe_token_supply_hist") - op.drop_table("af_merchant_moe_token_supply_current") - op.drop_index("af_merchant_moe_token_bin_hist_token_block_desc_index", table_name="af_merchant_moe_token_bin_hist") - op.drop_table("af_merchant_moe_token_bin_hist") - op.drop_index("af_merchant_moe_token_bin_current_token_id_index", table_name="af_merchant_moe_token_bin_current") - op.drop_table("af_merchant_moe_token_bin_current") - op.drop_table("af_merchant_moe_pools") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20240927_add_merchant_moe_table.py b/hemera/migrations/versions/20240927_add_merchant_moe_table.py deleted file mode 100644 index cf5c33a15..000000000 --- 
a/hemera/migrations/versions/20240927_add_merchant_moe_table.py +++ /dev/null @@ -1,112 +0,0 @@ -"""add merchant moe table - -Revision ID: 67015d9fa59b -Revises: c609922eae7a -Create Date: 2024-09-27 17:00:46.320469 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "67015d9fa59b" -down_revision: Union[str, None] = "c609922eae7a" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "af_merchant_moe_pool_data_current", - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("active_id", sa.BIGINT(), nullable=True), - sa.Column("bin_step", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True), - sa.PrimaryKeyConstraint("pool_address"), - ) - op.create_table( - "af_merchant_moe_pool_data_hist", - sa.Column("pool_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("active_id", sa.BIGINT(), nullable=True), - sa.Column("bin_step", sa.BIGINT(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True), - sa.PrimaryKeyConstraint("pool_address", "block_timestamp", 
"block_number"), - ) - op.create_table( - "af_staked_fbtc_current", - sa.Column("vault_address", postgresql.BYTEA(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("amount", sa.NUMERIC(precision=100), nullable=True), - sa.Column("changed_amount", sa.NUMERIC(precision=100), nullable=True), - sa.Column("protocol_id", sa.VARCHAR(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("vault_address", "wallet_address"), - ) - op.create_index( - "af_staked_fbtc_current_protocol_block_desc_index", - "af_staked_fbtc_current", - [sa.text("protocol_id DESC")], - unique=False, - ) - op.create_index( - "af_staked_fbtc_current_wallet_block_desc_index", - "af_staked_fbtc_current", - [sa.text("wallet_address DESC")], - unique=False, - ) - op.create_table( - "af_staked_fbtc_detail_hist", - sa.Column("vault_address", postgresql.BYTEA(), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("amount", sa.NUMERIC(precision=100), nullable=True), - sa.Column("changed_amount", sa.NUMERIC(precision=100), nullable=True), - sa.Column("protocol_id", sa.VARCHAR(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True), - sa.PrimaryKeyConstraint("vault_address", "wallet_address", "block_timestamp", "block_number"), - ) - op.create_index( - 
"af_staked_fbtc_detail_hist_protocol_block_desc_index", - "af_staked_fbtc_detail_hist", - [sa.text("protocol_id DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - op.create_index( - "af_staked_fbtc_detail_hist_wallet_block_desc_index", - "af_staked_fbtc_detail_hist", - [sa.text("wallet_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_index("af_staked_fbtc_detail_hist_wallet_block_desc_index", table_name="af_staked_fbtc_detail_hist") - op.drop_index("af_staked_fbtc_detail_hist_protocol_block_desc_index", table_name="af_staked_fbtc_detail_hist") - op.drop_table("af_staked_fbtc_detail_hist") - op.drop_index("af_staked_fbtc_current_wallet_block_desc_index", table_name="af_staked_fbtc_current") - op.drop_index("af_staked_fbtc_current_protocol_block_desc_index", table_name="af_staked_fbtc_current") - op.drop_table("af_staked_fbtc_current") - op.drop_table("af_merchant_moe_pool_data_hist") - op.drop_table("af_merchant_moe_pool_data_current") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20241017_earlier_table_change.py b/hemera/migrations/versions/20241017_earlier_table_change.py deleted file mode 100644 index fa4bf00c2..000000000 --- a/hemera/migrations/versions/20241017_earlier_table_change.py +++ /dev/null @@ -1,31 +0,0 @@ -"""earlier table change - -Revision ID: bc23aa19668e -Revises: 67015d9fa59b -Create Date: 2024-10-17 17:11:13.452322 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = "bc23aa19668e" -down_revision: Union[str, None] = "67015d9fa59b" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.add_column("af_ens_node_current", sa.Column("block_number", sa.BIGINT(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column("af_ens_node_current", "block_number") - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20241105_add_address_index_and_stats.py b/hemera/migrations/versions/20241105_add_address_index_and_stats.py deleted file mode 100644 index 0932ed7e6..000000000 --- a/hemera/migrations/versions/20241105_add_address_index_and_stats.py +++ /dev/null @@ -1,588 +0,0 @@ -"""add_address_index_and_stats - -Revision ID: 3bd2e3099bae -Revises: bc23aa19668e -Create Date: 2024-11-05 14:40:03.570136 - -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "3bd2e3099bae" -down_revision: Union[str, None] = "bc23aa19668e" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "address_contract_operations", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("trace_from_address", postgresql.BYTEA(), nullable=True), - sa.Column("contract_address", postgresql.BYTEA(), nullable=True), - sa.Column("trace_id", sa.TEXT(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - sa.Column("transaction_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("creation_code", postgresql.BYTEA(), nullable=True), - sa.Column("deployed_code", postgresql.BYTEA(), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("trace_type", sa.TEXT(), nullable=True), - sa.Column("call_type", sa.TEXT(), nullable=True), - sa.Column("transaction_receipt_status", sa.INTEGER(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "trace_id", "block_number", "transaction_index", "block_timestamp"), - if_not_exists=True, - ) - op.create_index( - "address_contract_operations_address_block_tn_t_idx", - "address_contract_operations", - ["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "address_internal_transactions", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("trace_id", sa.TEXT(), nullable=False), - sa.Column("block_number", sa.INTEGER(), nullable=False), - 
sa.Column("transaction_index", sa.INTEGER(), nullable=False), - sa.Column("transaction_hash", postgresql.BYTEA(), nullable=True), - sa.Column("block_timestamp", postgresql.TIMESTAMP(), nullable=False), - sa.Column("block_hash", postgresql.BYTEA(), nullable=True), - sa.Column("error", sa.TEXT(), nullable=True), - sa.Column("status", sa.INTEGER(), nullable=True), - sa.Column("input_method", sa.TEXT(), nullable=True), - sa.Column("value", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas", sa.NUMERIC(precision=100), nullable=True), - sa.Column("gas_used", sa.NUMERIC(precision=100), nullable=True), - sa.Column("trace_type", sa.TEXT(), nullable=True), - sa.Column("call_type", sa.TEXT(), nullable=True), - sa.Column("txn_type", sa.SMALLINT(), nullable=True), - sa.Column("related_address", postgresql.BYTEA(), nullable=True), - sa.Column("transaction_receipt_status", sa.INTEGER(), nullable=True), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("address", "trace_id", "block_number", "transaction_index", "block_timestamp"), - if_not_exists=True, - ) - op.create_index( - "address_internal_transactions_address_nt_t_idx", - "address_internal_transactions", - ["address", sa.text("block_timestamp DESC"), sa.text("block_number DESC"), sa.text("transaction_index DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "af_erc1155_token_holdings_current", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("block_number", sa.BIGINT(), nullable=True), - sa.Column("block_timestamp", sa.BIGINT(), nullable=True), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("create_time", 
postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.PrimaryKeyConstraint("position_token_address", "token_id", "wallet_address"), - if_not_exists=True, - ) - op.create_index( - "af_erc1155_token_holdings_current_token_block_desc_index", - "af_erc1155_token_holdings_current", - [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - if_not_exists=True, - ) - op.create_index( - "af_erc1155_token_holdings_current_wallet_block_desc_index", - "af_erc1155_token_holdings_current", - [sa.text("wallet_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "af_erc1155_token_holdings_hist", - sa.Column("position_token_address", postgresql.BYTEA(), nullable=False), - sa.Column("token_id", sa.NUMERIC(precision=100), nullable=False), - sa.Column("wallet_address", postgresql.BYTEA(), nullable=False), - sa.Column("balance", sa.NUMERIC(precision=100), nullable=True), - sa.Column("block_number", sa.BIGINT(), nullable=False), - sa.Column("block_timestamp", sa.BIGINT(), nullable=False), - sa.Column("create_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), server_default=sa.text("now()"), nullable=True), - sa.Column("reorg", sa.BOOLEAN(), nullable=True), - sa.PrimaryKeyConstraint( - "position_token_address", "token_id", "wallet_address", "block_timestamp", "block_number" - ), - if_not_exists=True, - ) - op.create_index( - "feature_erc1155_token_holding_token_block_desc_index", - "af_erc1155_token_holdings_hist", - [sa.text("position_token_address DESC"), sa.text("block_timestamp DESC")], - unique=False, - if_not_exists=True, - ) - op.create_index( - "feature_erc1155_token_holding_token_wallet_block_desc_index", - "af_erc1155_token_holdings_hist", - [sa.text("position_token_address DESC"), 
sa.text("wallet_address DESC"), sa.text("block_number DESC")], - unique=False, - if_not_exists=True, - ) - op.create_table( - "af_index_daily_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_in_value", sa.BIGINT(), nullable=True), - sa.Column("transaction_out_value", sa.BIGINT(), nullable=True), - sa.Column("transaction_self_value", sa.BIGINT(), nullable=True), - sa.Column("transaction_in_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_out_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_self_fee", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_in_value", sa.BIGINT(), nullable=True), - sa.Column("internal_transaction_out_value", sa.BIGINT(), nullable=True), - sa.Column("internal_transaction_self_value", sa.BIGINT(), nullable=True), - sa.Column("erc20_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_in_count", sa.INTEGER(), 
nullable=True), - sa.Column("nft_1155_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("contract_creation_count", sa.INTEGER(), nullable=True), - sa.Column("contract_destruction_count", sa.INTEGER(), nullable=True), - sa.Column("contract_operation_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_count", sa.INTEGER(), nullable=True), - sa.PrimaryKeyConstraint("address", "block_date"), - if_not_exists=True, - ) - op.create_table( - "af_index_na_scheduled_metadata", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("id"), - if_not_exists=True, - ) - op.create_table( - "af_index_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("transaction_in_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_in_value", sa.NUMERIC(), nullable=True), - sa.Column("transaction_out_value", sa.NUMERIC(), nullable=True), - sa.Column("transaction_self_value", sa.NUMERIC(), nullable=True), - sa.Column("transaction_in_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_out_fee", sa.NUMERIC(), nullable=True), - sa.Column("transaction_self_fee", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_in_count", sa.INTEGER(), nullable=True), - 
sa.Column("internal_transaction_out_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_self_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_in_value", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_out_value", sa.NUMERIC(), nullable=True), - sa.Column("internal_transaction_self_value", sa.NUMERIC(), nullable=True), - sa.Column("erc20_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_in_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_out_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_self_count", sa.INTEGER(), nullable=True), - sa.Column("contract_creation_count", sa.INTEGER(), nullable=True), - sa.Column("contract_destruction_count", sa.INTEGER(), nullable=True), - sa.Column("contract_operation_count", sa.INTEGER(), nullable=True), - sa.Column("transaction_count", sa.INTEGER(), nullable=True), - sa.Column("internal_transaction_count", sa.INTEGER(), nullable=True), - sa.Column("erc20_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_721_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("nft_1155_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("tag", sa.VARCHAR(), nullable=True), - sa.PrimaryKeyConstraint("address"), - if_not_exists=True, - ) - op.create_table( - 
"af_index_token_address_daily_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_holder_count", sa.INTEGER(), nullable=True), - sa.Column("token_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("address"), - if_not_exists=True, - ) - op.create_table( - "af_index_token_address_stats", - sa.Column("address", postgresql.BYTEA(), nullable=False), - sa.Column("token_holder_count", sa.INTEGER(), nullable=True), - sa.Column("token_transfer_count", sa.INTEGER(), nullable=True), - sa.Column("update_time", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("address"), - if_not_exists=True, - ) - op.create_table( - "af_stats_na_daily_addresses", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("active_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("receiver_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("sender_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("total_address_cnt", sa.BIGINT(), nullable=True), - sa.Column("new_address_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - if_not_exists=True, - ) - op.create_table( - "af_stats_na_daily_blocks", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("cnt", sa.BIGINT(), nullable=True), - sa.Column("avg_size", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_limit", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_used", sa.NUMERIC(), nullable=True), - sa.Column("total_gas_used", sa.BIGINT(), nullable=True), - sa.Column("avg_gas_used_percentage", sa.NUMERIC(), nullable=True), - sa.Column("avg_txn_cnt", sa.NUMERIC(), nullable=True), - sa.Column("total_cnt", sa.BIGINT(), nullable=True), - sa.Column("block_interval", sa.NUMERIC(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - if_not_exists=True, - ) - op.create_table( - "af_stats_na_daily_bridge_transactions", - sa.Column("block_date", 
sa.DATE(), nullable=False), - sa.Column("deposit_cnt", sa.BIGINT(), nullable=True), - sa.Column("withdraw_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - if_not_exists=True, - ) - op.create_table( - "af_stats_na_daily_tokens", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("erc20_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.Column("erc721_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.Column("erc1155_active_address_cnt", sa.INTEGER(), nullable=True), - sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - if_not_exists=True, - ) - op.create_table( - "af_stats_na_daily_transactions", - sa.Column("block_date", sa.DATE(), nullable=False), - sa.Column("cnt", sa.BIGINT(), nullable=True), - sa.Column("total_cnt", sa.BIGINT(), nullable=True), - sa.Column("txn_error_cnt", sa.BIGINT(), nullable=True), - sa.Column("avg_transaction_fee", sa.NUMERIC(), nullable=True), - sa.Column("avg_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("max_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("min_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_fee", sa.NUMERIC(), nullable=True), - sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - if_not_exists=True, - ) - op.create_table( - "coin_prices", - sa.Column("block_date", sa.DateTime(), nullable=False), - sa.Column("price", sa.Numeric(), nullable=True), - sa.PrimaryKeyConstraint("block_date"), - if_not_exists=True, - ) - 
op.create_table( - "scheduled_metadata", - sa.Column("id", sa.INTEGER(), nullable=False), - sa.Column("dag_id", sa.VARCHAR(), nullable=True), - sa.Column("execution_date", sa.DateTime(), nullable=True), - sa.Column("last_data_timestamp", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - if_not_exists=True, - ) - op.drop_table("daily_contract_interacted_aggregates", if_exists=True) - op.drop_table("scheduled_token_count_metadata", if_exists=True) - op.drop_table("daily_addresses_aggregates", if_exists=True) - op.drop_table("daily_transactions_aggregates", if_exists=True) - op.drop_table("scheduled_wallet_count_metadata", if_exists=True) - op.drop_table("daily_wallet_addresses_aggregates", if_exists=True) - op.drop_table("statistics_wallet_addresses", if_exists=True) - op.drop_table("daily_tokens_aggregates", if_exists=True) - op.drop_table("daily_blocks_aggregates", if_exists=True) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "daily_blocks_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("avg_size", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_gas_limit", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("total_gas_used", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("avg_gas_used_percentage", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_txn_cnt", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("total_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("block_interval", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_blocks_aggregates_pkey"), - if_not_exists=True, - ) - op.create_table( - "daily_tokens_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("erc20_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("erc721_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("erc1155_active_address_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_total_transfer_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_tokens_aggregates_pkey"), - if_not_exists=True, - ) - op.create_table( - "statistics_wallet_addresses", - sa.Column("address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), autoincrement=False, 
nullable=True), - sa.Column("txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("deposit_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("tag", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("address", name="statistics_wallet_addresses_pkey"), - if_not_exists=True, - ) - op.create_table( - "daily_wallet_addresses_aggregates", - sa.Column("address", postgresql.BYTEA(), 
autoincrement=False, nullable=False), - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("internal_txn_in_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("internal_txn_out_value", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_in_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc20_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc721_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("erc1155_transfer_out_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "internal_txn_cnt", - sa.INTEGER(), - sa.Computed("(internal_txn_in_cnt + internal_txn_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "erc20_transfer_cnt", - sa.INTEGER(), - sa.Computed("(erc20_transfer_in_cnt + erc20_transfer_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "erc721_transfer_cnt", - sa.INTEGER(), - sa.Computed("(erc721_transfer_in_cnt + erc721_transfer_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "erc1155_transfer_cnt", - sa.INTEGER(), - 
sa.Computed("(erc1155_transfer_in_cnt + erc1155_transfer_out_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column("txn_self_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_in_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_out_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("txn_self_error_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "txn_cnt", - sa.INTEGER(), - sa.Computed("((txn_in_cnt + txn_out_cnt) - txn_self_cnt)", persisted=True), - autoincrement=False, - nullable=True, - ), - sa.Column("deposit_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("withdraw_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("gas_in_used", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l2_txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l1_txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_in_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("gas_out_used", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l2_txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("l1_txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("txn_out_fee", sa.NUMERIC(precision=78, scale=0), autoincrement=False, nullable=True), - sa.Column("contract_deployed_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("from_address_unique_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column("to_address_unique_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("address", "block_date", name="daily_wallet_addresses_aggregates_pkey"), - if_not_exists=True, - ) - 
op.create_table( - "scheduled_wallet_count_metadata", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("dag_id", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("id", name="scheduled_wallet_count_metadata_pkey"), - if_not_exists=True, - ) - op.create_table( - "daily_transactions_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("total_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("txn_error_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("avg_transaction_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_fee", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_gas_used", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("avg_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_gas_price", sa.NUMERIC(), autoincrement=False, nullable=True), - 
sa.Column("avg_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("max_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.Column("min_receipt_l1_fee_scalar", sa.NUMERIC(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_transactions_aggregates_pkey"), - if_not_exists=True, - ) - op.create_table( - "daily_addresses_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("active_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("receiver_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("sender_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("total_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.Column("new_address_cnt", sa.BIGINT(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("block_date", name="daily_addresses_aggregates_pkey"), - if_not_exists=True, - ) - op.create_table( - "scheduled_token_count_metadata", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("dag_id", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("execution_date", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("last_data_timestamp", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint("id", name="scheduled_token_count_metadata_pkey"), - if_not_exists=True, - ) - op.create_table( - "daily_contract_interacted_aggregates", - sa.Column("block_date", sa.DATE(), autoincrement=False, nullable=False), - sa.Column("from_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("to_address", postgresql.BYTEA(), autoincrement=False, nullable=False), - sa.Column("contract_interacted_cnt", sa.INTEGER(), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint( - "block_date", "from_address", "to_address", 
name="daily_contract_interacted_aggregates_pkey" - ), - if_not_exists=True, - ) - op.drop_table("scheduled_metadata", if_exists=True) - op.drop_table("coin_prices", if_exists=True) - op.drop_table("af_stats_na_daily_transactions", if_exists=True) - op.drop_table("af_stats_na_daily_tokens", if_exists=True) - op.drop_table("af_stats_na_daily_bridge_transactions", if_exists=True) - op.drop_table("af_stats_na_daily_blocks", if_exists=True) - op.drop_table("af_stats_na_daily_addresses", if_exists=True) - op.drop_table("af_index_token_address_stats", if_exists=True) - op.drop_table("af_index_token_address_daily_stats", if_exists=True) - op.drop_table("af_index_stats", if_exists=True) - op.drop_table("af_index_na_scheduled_metadata", if_exists=True) - op.drop_table("af_index_daily_stats", if_exists=True) - op.drop_index( - "feature_erc1155_token_holding_token_wallet_block_desc_index", - table_name="af_erc1155_token_holdings_hist", - if_exists=True, - ) - op.drop_index( - "feature_erc1155_token_holding_token_block_desc_index", - table_name="af_erc1155_token_holdings_hist", - if_exists=True, - ) - op.drop_table("af_erc1155_token_holdings_hist", if_exists=True) - op.drop_index( - "af_erc1155_token_holdings_current_wallet_block_desc_index", - table_name="af_erc1155_token_holdings_current", - if_exists=True, - ) - op.drop_index( - "af_erc1155_token_holdings_current_token_block_desc_index", - table_name="af_erc1155_token_holdings_current", - if_exists=True, - ) - op.drop_table("af_erc1155_token_holdings_current", if_exists=True) - op.drop_index( - "address_internal_transactions_address_nt_t_idx", table_name="address_internal_transactions", if_exists=True - ) - op.drop_table("address_internal_transactions", if_exists=True) - op.drop_index( - "address_contract_operations_address_block_tn_t_idx", table_name="address_contract_operations", if_exists=True - ) - op.drop_table("address_contract_operations", if_exists=True) - # ### end Alembic commands ### diff --git 
a/hemera/migrations/versions/20241121_add_failure_records_table.py b/hemera/migrations/versions/20241121_add_failure_records_table.py deleted file mode 100644 index c8f24c39a..000000000 --- a/hemera/migrations/versions/20241121_add_failure_records_table.py +++ /dev/null @@ -1,41 +0,0 @@ -"""add_failure_records_table -Revision ID: f846e3abeb18 -Revises: 3bd2e3099bae -Create Date: 2024-11-21 21:37:25.662986 -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "f846e3abeb18" -down_revision: Union[str, None] = "3bd2e3099bae" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "failure_records", - sa.Column("record_id", sa.BIGINT(), autoincrement=True, nullable=False), - sa.Column("mission_sign", sa.VARCHAR(), nullable=True), - sa.Column("output_types", sa.VARCHAR(), nullable=True), - sa.Column("start_block_number", sa.BIGINT(), nullable=True), - sa.Column("end_block_number", sa.BIGINT(), nullable=True), - sa.Column("exception_stage", sa.VARCHAR(), nullable=True), - sa.Column("exception", postgresql.JSON(astext_type=sa.Text()), nullable=True), - sa.Column("crash_time", postgresql.TIMESTAMP(), nullable=True), - sa.PrimaryKeyConstraint("record_id"), - if_not_exists=True, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("failure_records", if_exists=True) - # ### end Alembic commands ### diff --git a/hemera/migrations/versions/20241128_update_table_for_0.6.0.py b/hemera/migrations/versions/20241128_update_table_for_0.6.0.py deleted file mode 100644 index defe78283..000000000 --- a/hemera/migrations/versions/20241128_update_table_for_0.6.0.py +++ /dev/null @@ -1,59 +0,0 @@ -"""add_failure_records_table -Revision ID: f846e3abeb18 -Revises: f846e3abeb18 -Create Date: 2024-11-28 18:37:25.662986 -""" - -from typing import Sequence, Union - -import sqlalchemy as sa -from alembic import op -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. -revision: str = "3c7ea7b95dc5" -down_revision: Union[str, None] = "f846e3abeb18" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_constraint("logs_pkey", "logs", type_="primary") - op.create_primary_key( - "logs_pkey", - "logs", - ["transaction_hash", "block_hash", "log_index"], - ) - - op.drop_constraint( - "af_holding_balance_uniswap_v3_period_pkey", "af_holding_balance_uniswap_v3_period", type_="primary" - ) - op.alter_column("af_holding_balance_uniswap_v3_period", "position_token_address", new_column_name="pool_address") - op.create_primary_key( - "af_holding_balance_uniswap_v3_period_pkey", - "af_holding_balance_uniswap_v3_period", - ["period_date", "protocol_id", "pool_address", "token_id"], - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint( - "af_holding_balance_uniswap_v3_period_pkey", "af_holding_balance_uniswap_v3_period", type_="primary" - ) - op.alter_column("af_holding_balance_uniswap_v3_period", "pool_address", new_column_name="position_token_address") - op.create_primary_key( - "af_holding_balance_uniswap_v3_period_pkey", - "af_holding_balance_uniswap_v3_period", - ["period_date", "protocol_id", "position_token_address", "token_id"], - ) - - op.drop_constraint("logs_pkey", "logs", type_="primary") - op.create_primary_key( - "logs_pkey", - "logs", - ["log_index", "transaction_hash"], - ) - # ### end Alembic commands ### diff --git a/hemera/resource/hemera.ini.example b/hemera/resource/hemera.ini.example deleted file mode 100644 index 4c392c84a..000000000 --- a/hemera/resource/hemera.ini.example +++ /dev/null @@ -1,42 +0,0 @@ -[alembic] -script_location = migrations -prepend_sys_path = . -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. -sqlalchemy.url = postgresql+psycopg2://postgres:admin@localhost:5432/bt1 - -[post_write_hooks] - -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S - diff --git a/hemera_udf/aci_features/common_utils.py b/hemera_udf/aci_features/common_utils.py index 782756e53..4d39a8676 100644 --- a/hemera_udf/aci_features/common_utils.py +++ b/hemera_udf/aci_features/common_utils.py @@ -1,7 +1,7 @@ import logging from typing import cast -from web3.types import ABIFunction +from eth_typing import ABIFunction from hemera.common.utils.abi_code_utils import 
encode_data from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str diff --git a/hemera_udf/address_index/jobs/address_index_job.py b/hemera_udf/address_index/jobs/address_index_job.py index 5b2b45cb8..e13e7a3ae 100644 --- a/hemera_udf/address_index/jobs/address_index_job.py +++ b/hemera_udf/address_index/jobs/address_index_job.py @@ -3,6 +3,7 @@ from itertools import groupby from typing import List, Union +from hemera.common.enumeration.txn_type import InternalTransactionType from hemera.common.utils.web3_utils import ZERO_ADDRESS from hemera.indexer.domains.contract_internal_transaction import ContractInternalTransaction from hemera.indexer.domains.token_id_infos import UpdateERC721TokenIdDetail @@ -10,8 +11,8 @@ from hemera.indexer.domains.transaction import Transaction from hemera.indexer.executors.batch_work_executor import BatchWorkExecutor from hemera.indexer.jobs.base_job import ExtensionJob +from hemera.indexer.jobs.export_nft_infos_job import generate_token_id_info from hemera.indexer.jobs.export_token_balances_job import extract_token_parameters -from hemera.indexer.jobs.export_token_id_infos_job import generate_token_id_info from hemera.indexer.utils.collection_utils import distinct_collections_by_group from hemera.indexer.utils.token_fetcher import TokenFetcher from hemera_udf.address_index.domains import * @@ -19,42 +20,6 @@ logger = logging.getLogger(__name__) -class InternalTransactionType(Enum): - SELF_CALL = 0 - SENDER = 1 - RECEIVER = 2 - - -class AddressTransactionType(Enum): - SELF_CALL = 0 - - SENDER = 1 - RECEIVER = 2 - - CREATOR = 3 - BEEN_CREATED = 4 - - -class AddressTokenTransferType(Enum): - SELF_CALL = 0 - - SENDER = 1 - RECEIVER = 2 - - DEPOSITOR = 3 - WITHDRAWER = 4 - - -class AddressNftTransferType(Enum): - SELF_CALL = 0 - - SENDER = 1 - RECEIVER = 2 - - BURNER = 3 - MINTER = 4 - - def create_address_internal_transaction( internal_transaction: ContractInternalTransaction, address: str, diff --git 
a/hemera_udf/address_index/models/address_contract_operation.py b/hemera_udf/address_index/models/address_contract_operation.py deleted file mode 100644 index 904acae06..000000000 --- a/hemera_udf/address_index/models/address_contract_operation.py +++ /dev/null @@ -1,50 +0,0 @@ -from sqlalchemy import Column, Index, desc, func -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, TEXT, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressContractOperation - - -class AddressContractOperations(HemeraModel): - __tablename__ = "address_contract_operations" - - address = Column(BYTEA, primary_key=True) - trace_from_address = Column(BYTEA) - contract_address = Column(BYTEA) - trace_id = Column(TEXT, primary_key=True) - block_number = Column(INTEGER, primary_key=True) - transaction_index = Column(INTEGER, primary_key=True) - transaction_hash = Column(BYTEA) - block_timestamp = Column(TIMESTAMP, primary_key=True) - block_hash = Column(BYTEA) - error = Column(TEXT) - status = Column(INTEGER) - creation_code = Column(BYTEA) - deployed_code = Column(BYTEA) - gas = Column(NUMERIC(100)) - gas_used = Column(NUMERIC(100)) - trace_type = Column(TEXT) - call_type = Column(TEXT) - transaction_receipt_status = Column(INTEGER) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": AddressContractOperation, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "address_contract_operations_address_block_tn_t_idx", - AddressContractOperations.address, - desc(AddressContractOperations.block_timestamp), - desc(AddressContractOperations.block_number), - desc(AddressContractOperations.transaction_index), -) diff --git a/hemera_udf/address_index/models/address_index_daily_stats.py 
b/hemera_udf/address_index/models/address_index_daily_stats.py deleted file mode 100644 index daf8ad64c..000000000 --- a/hemera_udf/address_index/models/address_index_daily_stats.py +++ /dev/null @@ -1,65 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, DATE, INTEGER, NUMERIC - -from hemera.common.models import HemeraModel - - -class AddressIndexDailyStats(HemeraModel): - __tablename__ = "af_index_daily_stats" - - address = Column(BYTEA, primary_key=True) - block_date = Column(DATE, primary_key=True) - - transaction_in_count = Column(INTEGER) - transaction_out_count = Column(INTEGER) - transaction_self_count = Column(INTEGER) - - transaction_in_value = Column(NUMERIC) - transaction_out_value = Column(NUMERIC) - transaction_self_value = Column(NUMERIC) - - transaction_in_fee = Column(NUMERIC) - transaction_out_fee = Column(NUMERIC) - transaction_self_fee = Column(NUMERIC) - - internal_transaction_in_count = Column(INTEGER) - internal_transaction_out_count = Column(INTEGER) - internal_transaction_self_count = Column(INTEGER) - - internal_transaction_in_value = Column(NUMERIC) - internal_transaction_out_value = Column(NUMERIC) - internal_transaction_self_value = Column(NUMERIC) - - erc20_transfer_in_count = Column(INTEGER) - erc20_transfer_out_count = Column(INTEGER) - erc20_transfer_self_count = Column(INTEGER) - - nft_transfer_in_count = Column(INTEGER) - nft_transfer_out_count = Column(INTEGER) - nft_transfer_self_count = Column(INTEGER) - - nft_721_transfer_in_count = Column(INTEGER) - nft_721_transfer_out_count = Column(INTEGER) - nft_721_transfer_self_count = Column(INTEGER) - - nft_1155_transfer_in_count = Column(INTEGER) - nft_1155_transfer_out_count = Column(INTEGER) - nft_1155_transfer_self_count = Column(INTEGER) - - contract_creation_count = Column(INTEGER) - contract_destruction_count = Column(INTEGER) - contract_operation_count = Column(INTEGER) - - transaction_count = Column( - INTEGER, - ) - 
internal_transaction_count = Column( - INTEGER, - ) - erc20_transfer_count = Column( - INTEGER, - ) - - nft_transfer_count = Column(INTEGER) - nft_721_transfer_count = Column(INTEGER) - nft_1155_transfer_count = Column(INTEGER) diff --git a/hemera_udf/address_index/models/address_index_stats.py b/hemera_udf/address_index/models/address_index_stats.py deleted file mode 100644 index 2ff1e44af..000000000 --- a/hemera_udf/address_index/models/address_index_stats.py +++ /dev/null @@ -1,66 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, VARCHAR - -from hemera.common.models import HemeraModel - - -class AddressIndexStats(HemeraModel): - __tablename__ = "af_index_stats" - - address = Column(BYTEA, primary_key=True) - - transaction_in_count = Column(INTEGER) - transaction_out_count = Column(INTEGER) - transaction_self_count = Column(INTEGER) - - transaction_in_value = Column(NUMERIC) - transaction_out_value = Column(NUMERIC) - transaction_self_value = Column(NUMERIC) - - transaction_in_fee = Column(NUMERIC) - transaction_out_fee = Column(NUMERIC) - transaction_self_fee = Column(NUMERIC) - - internal_transaction_in_count = Column(INTEGER) - internal_transaction_out_count = Column(INTEGER) - internal_transaction_self_count = Column(INTEGER) - - internal_transaction_in_value = Column(NUMERIC) - internal_transaction_out_value = Column(NUMERIC) - internal_transaction_self_value = Column(NUMERIC) - - erc20_transfer_in_count = Column(INTEGER) - erc20_transfer_out_count = Column(INTEGER) - erc20_transfer_self_count = Column(INTEGER) - - nft_transfer_in_count = Column(INTEGER) - nft_transfer_out_count = Column(INTEGER) - nft_transfer_self_count = Column(INTEGER) - - nft_721_transfer_in_count = Column(INTEGER) - nft_721_transfer_out_count = Column(INTEGER) - nft_721_transfer_self_count = Column(INTEGER) - - nft_1155_transfer_in_count = Column(INTEGER) - nft_1155_transfer_out_count = Column(INTEGER) - 
nft_1155_transfer_self_count = Column(INTEGER) - - contract_creation_count = Column(INTEGER) - contract_destruction_count = Column(INTEGER) - contract_operation_count = Column(INTEGER) - - transaction_count = Column( - INTEGER, - ) - internal_transaction_count = Column( - INTEGER, - ) - erc20_transfer_count = Column( - INTEGER, - ) - - nft_transfer_count = Column(INTEGER) - nft_721_transfer_count = Column(INTEGER) - nft_1155_transfer_count = Column(INTEGER) - - tag = Column(VARCHAR) diff --git a/hemera_udf/address_index/models/address_internal_transaciton.py b/hemera_udf/address_index/models/address_internal_transaciton.py deleted file mode 100644 index a2c4c8ec5..000000000 --- a/hemera_udf/address_index/models/address_internal_transaciton.py +++ /dev/null @@ -1,50 +0,0 @@ -from sqlalchemy import Column, Index, desc, func -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TEXT, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressInternalTransaction - - -class AddressInternalTransactions(HemeraModel): - __tablename__ = "address_internal_transactions" - - address = Column(BYTEA, primary_key=True) - trace_id = Column(TEXT, primary_key=True) - block_number = Column(INTEGER, primary_key=True) - transaction_index = Column(INTEGER, primary_key=True) - transaction_hash = Column(BYTEA) - block_timestamp = Column(TIMESTAMP, primary_key=True) - block_hash = Column(BYTEA) - error = Column(TEXT) - status = Column(INTEGER) - input_method = Column(TEXT) - value = Column(NUMERIC(100)) - gas = Column(NUMERIC(100)) - gas_used = Column(NUMERIC(100)) - trace_type = Column(TEXT) - call_type = Column(TEXT) - txn_type = Column(SMALLINT) - related_address = Column(BYTEA) - transaction_receipt_status = Column(INTEGER) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): 
- return [ - { - "domain": AddressInternalTransaction, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "address_internal_transactions_address_nt_t_idx", - AddressInternalTransactions.address, - desc(AddressInternalTransactions.block_timestamp), - desc(AddressInternalTransactions.block_number), - desc(AddressInternalTransactions.transaction_index), -) diff --git a/hemera_udf/address_index/models/address_nft_1155_holders.py b/hemera_udf/address_index/models/address_nft_1155_holders.py deleted file mode 100644 index 58cc926bc..000000000 --- a/hemera_udf/address_index/models/address_nft_1155_holders.py +++ /dev/null @@ -1,35 +0,0 @@ -from sqlalchemy import Column, Index, desc, func -from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressNft1155Holder - - -class AddressNftTokenHolders(HemeraModel): - __tablename__ = "address_nft_1155_holders" - - address = Column(BYTEA, primary_key=True) - token_address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(100), primary_key=True) - balance_of = Column(NUMERIC(100)) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": AddressNft1155Holder, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "address_nft_1155_holders_token_address_balance_of_idx", - AddressNftTokenHolders.token_address, - AddressNftTokenHolders.token_id, - desc(AddressNftTokenHolders.balance_of), -) diff --git a/hemera_udf/address_index/models/address_nft_transfers.py b/hemera_udf/address_index/models/address_nft_transfers.py deleted file mode 100644 index a65dbf25f..000000000 --- a/hemera_udf/address_index/models/address_nft_transfers.py +++ 
/dev/null @@ -1,34 +0,0 @@ -from sqlalchemy import Column, func -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressNftTransfer - - -class AddressNftTransfers(HemeraModel): - __tablename__ = "address_nft_transfers" - - address = Column(BYTEA, primary_key=True) - block_number = Column(INTEGER, primary_key=True) - log_index = Column(INTEGER, primary_key=True) - transaction_hash = Column(BYTEA, primary_key=True) - block_timestamp = Column(TIMESTAMP, primary_key=True) - block_hash = Column(BYTEA, primary_key=True) - token_address = Column(BYTEA) - related_address = Column(BYTEA) - transfer_type = Column(SMALLINT) - token_id = Column(NUMERIC(100), primary_key=True) - value = Column(NUMERIC(100)) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": AddressNftTransfer, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] diff --git a/hemera_udf/address_index/models/address_token_holders.py b/hemera_udf/address_index/models/address_token_holders.py deleted file mode 100644 index ac0ccd212..000000000 --- a/hemera_udf/address_index/models/address_token_holders.py +++ /dev/null @@ -1,33 +0,0 @@ -from sqlalchemy import Column, Index, desc, func -from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressTokenHolder - - -class AddressTokenHolders(HemeraModel): - __tablename__ = "address_token_holders" - - address = Column(BYTEA, primary_key=True) - token_address = Column(BYTEA, primary_key=True) - balance_of = Column(NUMERIC(100)) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = 
Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": AddressTokenHolder, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "address_token_holders_token_address_balance_of_idx", - AddressTokenHolders.token_address, - desc(AddressTokenHolders.balance_of), -) diff --git a/hemera_udf/address_index/models/address_token_transfers.py b/hemera_udf/address_index/models/address_token_transfers.py deleted file mode 100644 index 369230b0b..000000000 --- a/hemera_udf/address_index/models/address_token_transfers.py +++ /dev/null @@ -1,33 +0,0 @@ -from sqlalchemy import Column, func -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressTokenTransfer - - -class AddressTokenTransfers(HemeraModel): - __tablename__ = "address_token_transfers" - - address = Column(BYTEA, primary_key=True) - block_number = Column(INTEGER, primary_key=True) - log_index = Column(INTEGER, primary_key=True) - transaction_hash = Column(BYTEA, primary_key=True) - block_timestamp = Column(TIMESTAMP, primary_key=True) - block_hash = Column(BYTEA, primary_key=True) - token_address = Column(BYTEA) - related_address = Column(BYTEA) - transfer_type = Column(SMALLINT) - value = Column(NUMERIC(100)) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": AddressTokenTransfer, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] diff --git a/hemera_udf/address_index/models/address_transactions.py b/hemera_udf/address_index/models/address_transactions.py deleted file mode 100644 index cf552ec96..000000000 --- 
a/hemera_udf/address_index/models/address_transactions.py +++ /dev/null @@ -1,53 +0,0 @@ -from sqlalchemy import Column, Index, desc, func -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, NUMERIC, SMALLINT, TEXT, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import AddressTransaction - - -class AddressTransactions(HemeraModel): - __tablename__ = "address_transactions" - - address = Column(BYTEA, primary_key=True) - block_number = Column(INTEGER, primary_key=True) - transaction_index = Column(INTEGER, primary_key=True) - transaction_hash = Column(BYTEA) - block_timestamp = Column(TIMESTAMP, primary_key=True) - block_hash = Column(BYTEA) - txn_type = Column(SMALLINT) - related_address = Column(BYTEA) - value = Column(NUMERIC(100)) - transaction_fee = Column(NUMERIC(100)) - receipt_status = Column(INTEGER) - method = Column(TEXT) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": AddressTransaction, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "address_transactions_address_block_timestamp_block_number_t_idx", - AddressTransactions.address, - desc(AddressTransactions.block_timestamp), - desc(AddressTransactions.block_number), - desc(AddressTransactions.transaction_index), -) - -Index( - "address_transactions_address_txn_type_block_timestamp_block_idx", - AddressTransactions.address, - AddressTransactions.txn_type, - desc(AddressTransactions.block_timestamp), - desc(AddressTransactions.block_number), - desc(AddressTransactions.transaction_index), -) diff --git a/hemera_udf/address_index/models/token_address_index.py b/hemera_udf/address_index/models/token_address_index.py deleted file mode 100644 index dced1d7b9..000000000 --- 
a/hemera_udf/address_index/models/token_address_index.py +++ /dev/null @@ -1,15 +0,0 @@ -from sqlalchemy import DATE, Column, func -from sqlalchemy.dialects.postgresql import BIGINT, BYTEA, INTEGER, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel - - -class TokenAddressIndexStats(HemeraModel): - __tablename__ = "af_index_token_address_stats" - - address = Column(BYTEA, primary_key=True) - - token_holder_count = Column(INTEGER) - token_transfer_count = Column(INTEGER) - - update_time = Column(TIMESTAMP, server_onupdate=func.now()) diff --git a/hemera_udf/address_index/models/token_address_index_daily_stats.py b/hemera_udf/address_index/models/token_address_index_daily_stats.py deleted file mode 100644 index 0ae74f2d2..000000000 --- a/hemera_udf/address_index/models/token_address_index_daily_stats.py +++ /dev/null @@ -1,15 +0,0 @@ -from sqlalchemy import Column, func -from sqlalchemy.dialects.postgresql import BYTEA, INTEGER, TIMESTAMP - -from hemera.common.models import HemeraModel - - -class TokenAddressIndexStats(HemeraModel): - __tablename__ = "af_index_token_address_daily_stats" - - address = Column(BYTEA, primary_key=True) - - token_holder_count = Column(INTEGER) - token_transfer_count = Column(INTEGER) - - update_time = Column(TIMESTAMP, server_onupdate=func.now()) diff --git a/hemera_udf/address_index/models/token_address_nft_inventories.py b/hemera_udf/address_index/models/token_address_nft_inventories.py deleted file mode 100644 index e0df281df..000000000 --- a/hemera_udf/address_index/models/token_address_nft_inventories.py +++ /dev/null @@ -1,36 +0,0 @@ -from sqlalchemy import Column, Index, PrimaryKeyConstraint, func -from sqlalchemy.dialects.postgresql import BYTEA, NUMERIC, TIMESTAMP - -from hemera.common.models import HemeraModel, general_converter -from hemera_udf.address_index.domains import TokenAddressNftInventory - - -class TokenAddressNftInventories(HemeraModel): - __tablename__ = "token_address_nft_inventories" - - 
token_address = Column(BYTEA, primary_key=True) - token_id = Column(NUMERIC(100), primary_key=True) - wallet_address = Column(BYTEA) - create_time = Column(TIMESTAMP, server_default=func.now()) - update_time = Column(TIMESTAMP, server_default=func.now()) - - __table_args__ = (PrimaryKeyConstraint("token_address", "token_id"),) - - @staticmethod - def model_domain_mapping(): - return [ - { - "domain": TokenAddressNftInventory, - "conflict_do_update": True, - "update_strategy": None, - "converter": general_converter, - } - ] - - -Index( - "token_address_nft_inventories_wallet_address_token_address__idx", - TokenAddressNftInventories.wallet_address, - TokenAddressNftInventories.token_address, - TokenAddressNftInventories.token_id, -) diff --git a/hemera_udf/address_index/utils/helpers.py b/hemera_udf/address_index/utils/helpers.py index 4b8f3f65c..0fda1a5d7 100644 --- a/hemera_udf/address_index/utils/helpers.py +++ b/hemera_udf/address_index/utils/helpers.py @@ -16,10 +16,11 @@ from hemera.api.app.utils.token_utils import get_coin_prices, get_latest_coin_prices, get_token_price from hemera.api.app.utils.web3_utils import get_balance from hemera.common.enumeration.token_type import TokenType +from hemera.common.enumeration.txn_type import AddressTokenTransferType, AddressTransactionType from hemera.common.models import db -from hemera.common.models.contracts import Contracts -from hemera.common.models.scheduled_metadata import ScheduledMetadata -from hemera.common.models.tokens import Tokens +from hemera.common.models.token import Tokens +from hemera.common.models.trace.contracts import Contracts +from hemera.common.models.utils.scheduled_metadata import ScheduledMetadata from hemera.common.utils.db_utils import app_config, build_entities from hemera.common.utils.exception_control import APIError from hemera.common.utils.format_utils import ( @@ -30,14 +31,8 @@ hex_str_to_bytes, ) from hemera.common.utils.web3_utils import ZERO_ADDRESS -from 
hemera_udf.address_index.jobs.address_index_job import ( - AddressTokenTransferType, - AddressTransactionType, - InternalTransactionType, -) from hemera_udf.address_index.models.address_contract_operation import AddressContractOperations from hemera_udf.address_index.models.address_index_daily_stats import AddressIndexDailyStats -from hemera_udf.address_index.models.address_internal_transaciton import AddressInternalTransactions from hemera_udf.address_index.models.address_nft_1155_holders import AddressNftTokenHolders from hemera_udf.address_index.models.address_token_holders import AddressTokenHolders from hemera_udf.address_index.models.address_token_transfers import AddressTokenTransfers diff --git a/hemera_udf/bridge/arbitrum/arb_parser.py b/hemera_udf/bridge/arbitrum/arb_parser.py index 11ddf6191..78592280b 100644 --- a/hemera_udf/bridge/arbitrum/arb_parser.py +++ b/hemera_udf/bridge/arbitrum/arb_parser.py @@ -9,9 +9,9 @@ from enum import Enum from typing import Any, Dict, Optional, cast +from eth_typing import ABIEvent, ABIFunction from web3 import Web3 from web3._utils.contracts import decode_transaction_data -from web3.types import ABIEvent, ABIFunction from hemera.common.utils.abi_code_utils import decode_log from hemera.indexer.utils.abi import event_log_abi_to_topic, function_abi_to_4byte_selector_str diff --git a/hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py b/hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py index 3b980aa9c..46ddca386 100644 --- a/hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py +++ b/hemera_udf/bridge/bedrock/parser/bedrock_bridge_parser.py @@ -2,9 +2,9 @@ from dataclasses import dataclass, field from typing import Any, Dict, List, Optional, cast +from eth_typing import ABIEvent, ABIFunction from web3._utils.contracts import decode_transaction_data from web3.auto import w3 -from web3.types import ABIEvent, ABIFunction from hemera.common.utils.abi_code_utils import decode_log from 
hemera.common.utils.format_utils import bytes_to_hex_str diff --git a/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py b/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py index c6b1a70f4..29751421c 100644 --- a/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py +++ b/hemera_udf/bridge/bedrock/parser/function_parser/__init__.py @@ -2,7 +2,7 @@ from enum import Enum from typing import Callable, List, Optional -from web3.types import ABIFunction +from eth_typing import ABIFunction from hemera.common.utils.format_utils import bytes_to_hex_str from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str diff --git a/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py index 799d9559f..a89bc8cbd 100644 --- a/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py +++ b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc20.py @@ -1,8 +1,8 @@ import json from typing import cast +from eth_typing import ABIFunction from web3._utils.contracts import decode_transaction_data -from web3.types import ABIFunction from hemera.common.utils.format_utils import bytes_to_hex_str from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str diff --git a/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py index afa2c8417..f98e4f1d4 100644 --- a/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py +++ b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_erc721.py @@ -1,8 +1,8 @@ import json from typing import cast +from eth_typing import ABIFunction from web3._utils.contracts import decode_transaction_data -from web3.types import ABIFunction from hemera.common.utils.format_utils import bytes_to_hex_str from hemera.indexer.utils.abi import function_abi_to_4byte_selector_str 
diff --git a/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py index c340831d6..d2c23f6cd 100644 --- a/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py +++ b/hemera_udf/bridge/bedrock/parser/function_parser/finalize_bridge_eth.py @@ -1,8 +1,8 @@ import json from typing import cast +from eth_typing import ABIFunction from web3._utils.contracts import decode_transaction_data -from web3.types import ABIFunction from hemera.common.utils.exception_control import FastShutdownError from hemera.common.utils.format_utils import bytes_to_hex_str diff --git a/hemera_udf/bridge/morphl2/abi/function.py b/hemera_udf/bridge/morphl2/abi/function.py index 3783e8f8f..7eebafac2 100644 --- a/hemera_udf/bridge/morphl2/abi/function.py +++ b/hemera_udf/bridge/morphl2/abi/function.py @@ -1,7 +1,7 @@ import logging from typing import Any, Callable, Dict, Optional -from web3.types import ABIFunction +from eth_typing import ABIFunction from hemera.common.utils.abi_code_utils import Function, FunctionCollection from hemera.common.utils.format_utils import bytes_to_hex_str diff --git a/hemera_udf/cyber_id/abi/function.py b/hemera_udf/cyber_id/abi/function.py index 88304467f..ad833c88c 100644 --- a/hemera_udf/cyber_id/abi/function.py +++ b/hemera_udf/cyber_id/abi/function.py @@ -1,4 +1,4 @@ -from web3.types import ABIFunction +from eth_typing import ABIFunction from hemera.common.utils.abi_code_utils import Function from hemera.indexer.domains.transaction import Transaction diff --git a/hemera_udf/day_mining/job/export_activeness.py b/hemera_udf/day_mining/job/export_activeness.py index 26d51ad41..cb55a7e3b 100644 --- a/hemera_udf/day_mining/job/export_activeness.py +++ b/hemera_udf/day_mining/job/export_activeness.py @@ -1,6 +1,6 @@ from collections import defaultdict -from hemera.common.models.contracts import Contracts +from hemera.common.models.trace.contracts import 
Contracts from hemera.common.utils.format_utils import bytes_to_hex_str from hemera.indexer.domains.contract import Contract from hemera.indexer.domains.transaction import Transaction diff --git a/hemera_udf/deposit_to_l2/deposit_parser.py b/hemera_udf/deposit_to_l2/deposit_parser.py index 6351c5500..8dda0b778 100644 --- a/hemera_udf/deposit_to_l2/deposit_parser.py +++ b/hemera_udf/deposit_to_l2/deposit_parser.py @@ -1,9 +1,8 @@ import json from typing import List, cast -from eth_typing import HexStr +from eth_typing import ABIEvent, ABIFunction, HexStr from web3._utils.contracts import decode_transaction_data -from web3.types import ABIEvent, ABIFunction from hemera.indexer.domains.transaction import Transaction from hemera.indexer.utils.abi import event_log_abi_to_topic, function_abi_to_4byte_selector_str diff --git a/hemera_udf/deposit_to_l2/deposit_to_l2_job.py b/hemera_udf/deposit_to_l2/deposit_to_l2_job.py index cfc458865..7bea2684a 100644 --- a/hemera_udf/deposit_to_l2/deposit_to_l2_job.py +++ b/hemera_udf/deposit_to_l2/deposit_to_l2_job.py @@ -3,8 +3,8 @@ import os from typing import List, cast +from eth_typing import ABIFunction from eth_utils import to_normalized_address -from web3.types import ABIFunction from hemera.common.utils.cache_utils import BlockToLiveDict, TimeToLiveDict from hemera.common.utils.db_utils import build_domains_by_sql diff --git a/hemera_udf/eigen_layer/endpoint/routes.py b/hemera_udf/eigen_layer/endpoint/routes.py index 0a737a035..0beb6cacb 100644 --- a/hemera_udf/eigen_layer/endpoint/routes.py +++ b/hemera_udf/eigen_layer/endpoint/routes.py @@ -3,7 +3,7 @@ from hemera.api.app.address.features import register_feature from hemera.common.models import db -from hemera.common.models.tokens import Tokens +from hemera.common.models.token import Tokens from hemera.common.utils.format_utils import bytes_to_hex_str, format_value_for_json, hex_str_to_bytes from hemera_udf.eigen_layer.models.af_eigen_layer_address_current import 
AfEigenLayerAddressCurrent diff --git a/hemera_udf/merchant_moe/endpoints/routes.py b/hemera_udf/merchant_moe/endpoints/routes.py index c2fd8e923..d6d41d58a 100644 --- a/hemera_udf/merchant_moe/endpoints/routes.py +++ b/hemera_udf/merchant_moe/endpoints/routes.py @@ -2,7 +2,7 @@ from hemera.common.models import db from hemera.common.models.current_token_balances import CurrentTokenBalances -from hemera.common.models.tokens import Tokens +from hemera.common.models.token import Tokens from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes from hemera_udf.merchant_moe.endpoints import merchant_moe_namespace from hemera_udf.merchant_moe.models.feature_erc1155_token_current_supply import FeatureErc1155TokenCurrentSupplyStatus diff --git a/hemera_udf/opensea/endpoint/routes.py b/hemera_udf/opensea/endpoint/routes.py index 0de299eba..d0ab26f69 100644 --- a/hemera_udf/opensea/endpoint/routes.py +++ b/hemera_udf/opensea/endpoint/routes.py @@ -9,8 +9,8 @@ from hemera.api.app.address.features import register_feature from hemera.api.app.cache import cache from hemera.common.models import db +from hemera.common.models.token import Tokens from hemera.common.models.token_hourly_price import TokenHourlyPrices -from hemera.common.models.tokens import Tokens from hemera.common.utils.format_utils import as_dict, bytes_to_hex_str, format_to_dict, hex_str_to_bytes from hemera_udf.opensea.endpoint import opensea_namespace from hemera_udf.opensea.jobs.opensea_job import ( diff --git a/hemera_udf/staking_fbtc/endpoints/routes.py b/hemera_udf/staking_fbtc/endpoints/routes.py index 6278de96c..007b25698 100644 --- a/hemera_udf/staking_fbtc/endpoints/routes.py +++ b/hemera_udf/staking_fbtc/endpoints/routes.py @@ -2,7 +2,7 @@ from sqlalchemy import func from hemera.common.models import db -from hemera.common.models.tokens import Tokens +from hemera.common.models.token import Tokens from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes from 
hemera_udf.staking_fbtc.endpoints import staking_namespace from hemera_udf.staking_fbtc.models.feature_staked_fbtc_detail_records import FeatureStakedFBTCDetailRecords diff --git a/hemera_udf/stats/__init__.py b/hemera_udf/stats/__init__.py deleted file mode 100644 index 4632ff849..000000000 --- a/hemera_udf/stats/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from __future__ import annotations - -import packaging.version - -from hemera import __version__ as hemera_version - -__all__ = ["__version__"] - -__version__ = "0.1.0" - -if packaging.version.parse(packaging.version.parse(hemera_version).base_version) < packaging.version.parse("1.0.0"): - raise RuntimeError(f"The package `hemera-modules-xxx:{__version__}` needs Hemera 1.0.0+") diff --git a/hemera_udf/stats/models/daily_addresses_stats.py b/hemera_udf/stats/models/daily_addresses_stats.py deleted file mode 100644 index b2839efc7..000000000 --- a/hemera_udf/stats/models/daily_addresses_stats.py +++ /dev/null @@ -1,16 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, DATE - -from hemera.common.models import HemeraModel - - -class DailyAddressesStats(HemeraModel): - - __tablename__ = "af_stats_na_daily_addresses" - - block_date = Column(DATE, primary_key=True) - active_address_cnt = Column(BIGINT) - receiver_address_cnt = Column(BIGINT) - sender_address_cnt = Column(BIGINT) - total_address_cnt = Column(BIGINT) - new_address_cnt = Column(BIGINT) diff --git a/hemera_udf/stats/models/daily_blocks_stats.py b/hemera_udf/stats/models/daily_blocks_stats.py deleted file mode 100644 index f4a540fd7..000000000 --- a/hemera_udf/stats/models/daily_blocks_stats.py +++ /dev/null @@ -1,20 +0,0 @@ -from sqlalchemy import NUMERIC, Column -from sqlalchemy.dialects.postgresql import BIGINT, DATE - -from hemera.common.models import HemeraModel - - -class DailyBlocksStats(HemeraModel): - - __tablename__ = "af_stats_na_daily_blocks" - - block_date = Column(DATE, primary_key=True) - cnt = 
Column(BIGINT) - avg_size = Column(NUMERIC) - avg_gas_limit = Column(NUMERIC) - avg_gas_used = Column(NUMERIC) - total_gas_used = Column(BIGINT) - avg_gas_used_percentage = Column(NUMERIC) - avg_txn_cnt = Column(NUMERIC) - total_cnt = Column(BIGINT) - block_interval = Column(NUMERIC) diff --git a/hemera_udf/stats/models/daily_bridge_transactions_stats.py b/hemera_udf/stats/models/daily_bridge_transactions_stats.py deleted file mode 100644 index 78a3463f0..000000000 --- a/hemera_udf/stats/models/daily_bridge_transactions_stats.py +++ /dev/null @@ -1,13 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, DATE - -from hemera.common.models import HemeraModel - - -class DailyBridgeTransactionsAggregates(HemeraModel): - - __tablename__ = "af_stats_na_daily_bridge_transactions" - - block_date = Column(DATE, primary_key=True) - deposit_cnt = Column(BIGINT) - withdraw_cnt = Column(BIGINT) diff --git a/hemera_udf/stats/models/daily_tokens_stats.py b/hemera_udf/stats/models/daily_tokens_stats.py deleted file mode 100644 index fc308e95d..000000000 --- a/hemera_udf/stats/models/daily_tokens_stats.py +++ /dev/null @@ -1,17 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, DATE, INTEGER - -from hemera.common.models import HemeraModel - - -class DailyTokensStats(HemeraModel): - - __tablename__ = "af_stats_na_daily_tokens" - - block_date = Column(DATE, primary_key=True) - erc20_active_address_cnt = Column(INTEGER) - erc20_total_transfer_cnt = Column(BIGINT) - erc721_active_address_cnt = Column(INTEGER) - erc721_total_transfer_cnt = Column(BIGINT) - erc1155_active_address_cnt = Column(INTEGER) - erc1155_total_transfer_cnt = Column(BIGINT) diff --git a/hemera_udf/stats/models/daily_transactions_stats.py b/hemera_udf/stats/models/daily_transactions_stats.py deleted file mode 100644 index dfa488750..000000000 --- a/hemera_udf/stats/models/daily_transactions_stats.py +++ /dev/null @@ -1,24 +0,0 @@ -from 
sqlalchemy import Column -from sqlalchemy.dialects.postgresql import BIGINT, DATE, NUMERIC - -from hemera.common.models import HemeraModel - - -class DailyTransactionsStats(HemeraModel): - - __tablename__ = "af_stats_na_daily_transactions" - - block_date = Column(DATE, primary_key=True) - cnt = Column(BIGINT) - total_cnt = Column(BIGINT) - txn_error_cnt = Column(BIGINT) - avg_transaction_fee = Column(NUMERIC) - avg_gas_price = Column(NUMERIC) - max_gas_price = Column(NUMERIC) - min_gas_price = Column(NUMERIC) - avg_receipt_l1_fee = Column(NUMERIC) - max_receipt_l1_fee = Column(NUMERIC) - min_receipt_l1_fee = Column(NUMERIC) - avg_receipt_l1_gas_price = Column(NUMERIC) - max_receipt_l1_gas_price = Column(NUMERIC) - min_receipt_l1_gas_price = Column(NUMERIC) diff --git a/hemera_udf/uniswap_v3/endpoints/routes.py b/hemera_udf/uniswap_v3/endpoints/routes.py index 722620e55..46612ffa0 100644 --- a/hemera_udf/uniswap_v3/endpoints/routes.py +++ b/hemera_udf/uniswap_v3/endpoints/routes.py @@ -11,8 +11,8 @@ from hemera.api.app.cache import cache from hemera.api.app.db_service.tokens import get_token_price_map_by_symbol_list from hemera.common.models import db +from hemera.common.models.token import Tokens from hemera.common.models.token_hourly_price import TokenHourlyPrices -from hemera.common.models.tokens import Tokens from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes from hemera_udf.opensea.endpoint.routes import get_token_daily_price from hemera_udf.uniswap_v3.endpoints import uniswap_v3_namespace diff --git a/hemera_udf/user_ops/jobs/export_user_ops_job.py b/hemera_udf/user_ops/jobs/export_user_ops_job.py index 2c63d0d6c..2503b43c1 100644 --- a/hemera_udf/user_ops/jobs/export_user_ops_job.py +++ b/hemera_udf/user_ops/jobs/export_user_ops_job.py @@ -1,9 +1,9 @@ import json from typing import cast +from eth_typing import ABIEvent, ABIFunction from web3._utils.contracts import decode_transaction_data from web3._utils.normalizers import 
BASE_RETURN_NORMALIZERS -from web3.types import ABIEvent, ABIFunction from hemera.common.utils.abi_code_utils import decode_log from hemera.indexer.domains.log import Log diff --git a/poetry.lock b/poetry.lock index 74e433336..05a989b25 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -6,17 +6,34 @@ version = "2.4.4" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, ] +[[package]] +name = "aiohappyeyeballs" +version = "2.4.8" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "aiohappyeyeballs-2.4.8-py3-none-any.whl", hash = "sha256:6cac4f5dd6e34a9644e69cf9021ef679e4394f54e58a183056d12009e42ea9e3"}, + {file = "aiohappyeyeballs-2.4.8.tar.gz", hash = "sha256:19728772cb12263077982d2f55453babd8bec6a052a926cd5c0c42796da8bf62"}, +] + [[package]] name = "aiohttp" version = "3.10.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5077b1a5f40ffa3ba1f40d537d3bec4383988ee51fbba6b74aa8fb1bc466599e"}, {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d6a14a4d93b5b3c2891fca94fa9d41b2322a68194422bef0dd5ec1e57d7d298"}, @@ -121,7 +138,112 @@ multidict 
= ">=4.5,<7.0" yarl = ">=1.12.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiohttp" +version = "3.11.13" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, + {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, + {file = "aiohttp-3.11.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9840be675de208d1f68f84d578eaa4d1a36eee70b16ae31ab933520c49ba1325"}, + {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28a772757c9067e2aee8a6b2b425d0efaa628c264d6416d283694c3d86da7689"}, + {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b88aca5adbf4625e11118df45acac29616b425833c3be7a05ef63a6a4017bfdb"}, + {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce10ddfbe26ed5856d6902162f71b8fe08545380570a885b4ab56aecfdcb07f4"}, + {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa48dac27f41b36735c807d1ab093a8386701bbf00eb6b89a0f69d9fa26b3671"}, + {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89ce611b1eac93ce2ade68f1470889e0173d606de20c85a012bfa24be96cf867"}, + {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:78e4dd9c34ec7b8b121854eb5342bac8b02aa03075ae8618b6210a06bbb8a115"}, + {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:66047eacbc73e6fe2462b77ce39fc170ab51235caf331e735eae91c95e6a11e4"}, + {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ad8f1c19fe277eeb8bc45741c6d60ddd11d705c12a4d8ee17546acff98e0802"}, + {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64815c6f02e8506b10113ddbc6b196f58dbef135751cc7c32136df27b736db09"}, + {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:967b93f21b426f23ca37329230d5bd122f25516ae2f24a9cea95a30023ff8283"}, + {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf1f31f83d16ec344136359001c5e871915c6ab685a3d8dee38e2961b4c81730"}, + {file = "aiohttp-3.11.13-cp310-cp310-win32.whl", hash = "sha256:00c8ac69e259c60976aa2edae3f13d9991cf079aaa4d3cd5a49168ae3748dee3"}, + {file = "aiohttp-3.11.13-cp310-cp310-win_amd64.whl", hash = "sha256:90d571c98d19a8b6e793b34aa4df4cee1e8fe2862d65cc49185a3a3d0a1a3996"}, + {file = "aiohttp-3.11.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b35aab22419ba45f8fc290d0010898de7a6ad131e468ffa3922b1b0b24e9d2e"}, + {file = "aiohttp-3.11.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81cba651db8795f688c589dd11a4fbb834f2e59bbf9bb50908be36e416dc760"}, + {file = "aiohttp-3.11.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f55d0f242c2d1fcdf802c8fabcff25a9d85550a4cf3a9cf5f2a6b5742c992839"}, + {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4bea08a6aad9195ac9b1be6b0c7e8a702a9cec57ce6b713698b4a5afa9c2e33"}, + {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6070bcf2173a7146bb9e4735b3c62b2accba459a6eae44deea0eb23e0035a23"}, + {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:718d5deb678bc4b9d575bfe83a59270861417da071ab44542d0fcb6faa686636"}, + {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f6b2c5b4a4d22b8fb2c92ac98e0747f5f195e8e9448bfb7404cd77e7bfa243f"}, + {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747ec46290107a490d21fe1ff4183bef8022b848cf9516970cb31de6d9460088"}, + {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:01816f07c9cc9d80f858615b1365f8319d6a5fd079cd668cc58e15aafbc76a54"}, + {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a08ad95fcbd595803e0c4280671d808eb170a64ca3f2980dd38e7a72ed8d1fea"}, + {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c97be90d70f7db3aa041d720bfb95f4869d6063fcdf2bb8333764d97e319b7d0"}, + {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ab915a57c65f7a29353c8014ac4be685c8e4a19e792a79fe133a8e101111438e"}, + {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:35cda4e07f5e058a723436c4d2b7ba2124ab4e0aa49e6325aed5896507a8a42e"}, + {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:af55314407714fe77a68a9ccaab90fdb5deb57342585fd4a3a8102b6d4370080"}, + {file = "aiohttp-3.11.13-cp311-cp311-win32.whl", hash = "sha256:42d689a5c0a0c357018993e471893e939f555e302313d5c61dfc566c2cad6185"}, + {file = "aiohttp-3.11.13-cp311-cp311-win_amd64.whl", hash = "sha256:b73a2b139782a07658fbf170fe4bcdf70fc597fae5ffe75e5b67674c27434a9f"}, + {file = "aiohttp-3.11.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2eabb269dc3852537d57589b36d7f7362e57d1ece308842ef44d9830d2dc3c90"}, + {file = "aiohttp-3.11.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b77ee42addbb1c36d35aca55e8cc6d0958f8419e458bb70888d8c69a4ca833d"}, + {file = "aiohttp-3.11.13-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:55789e93c5ed71832e7fac868167276beadf9877b85697020c46e9a75471f55f"}, + {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c929f9a7249a11e4aa5c157091cfad7f49cc6b13f4eecf9b747104befd9f56f2"}, + {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d33851d85537bbf0f6291ddc97926a754c8f041af759e0aa0230fe939168852b"}, + {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9229d8613bd8401182868fe95688f7581673e1c18ff78855671a4b8284f47bcb"}, + {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669dd33f028e54fe4c96576f406ebb242ba534dd3a981ce009961bf49960f117"}, + {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c1b20a1ace54af7db1f95af85da530fe97407d9063b7aaf9ce6a32f44730778"}, + {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5724cc77f4e648362ebbb49bdecb9e2b86d9b172c68a295263fa072e679ee69d"}, + {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:aa36c35e94ecdb478246dd60db12aba57cfcd0abcad43c927a8876f25734d496"}, + {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b5b37c863ad5b0892cc7a4ceb1e435e5e6acd3f2f8d3e11fa56f08d3c67b820"}, + {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e06cf4852ce8c4442a59bae5a3ea01162b8fcb49ab438d8548b8dc79375dad8a"}, + {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5194143927e494616e335d074e77a5dac7cd353a04755330c9adc984ac5a628e"}, + {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afcb6b275c2d2ba5d8418bf30a9654fa978b4f819c2e8db6311b3525c86fe637"}, + {file = "aiohttp-3.11.13-cp312-cp312-win32.whl", hash = "sha256:7104d5b3943c6351d1ad7027d90bdd0ea002903e9f610735ac99df3b81f102ee"}, + {file 
= "aiohttp-3.11.13-cp312-cp312-win_amd64.whl", hash = "sha256:47dc018b1b220c48089b5b9382fbab94db35bef2fa192995be22cbad3c5730c8"}, + {file = "aiohttp-3.11.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9862d077b9ffa015dbe3ce6c081bdf35135948cb89116e26667dd183550833d1"}, + {file = "aiohttp-3.11.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fbfef0666ae9e07abfa2c54c212ac18a1f63e13e0760a769f70b5717742f3ece"}, + {file = "aiohttp-3.11.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a1f7d857c4fcf7cabb1178058182c789b30d85de379e04f64c15b7e88d66fb"}, + {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba40b7ae0f81c7029583a338853f6607b6d83a341a3dcde8bed1ea58a3af1df9"}, + {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5b95787335c483cd5f29577f42bbe027a412c5431f2f80a749c80d040f7ca9f"}, + {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7d474c5c1f0b9405c1565fafdc4429fa7d986ccbec7ce55bc6a330f36409cad"}, + {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e83fb1991e9d8982b3b36aea1e7ad27ea0ce18c14d054c7a404d68b0319eebb"}, + {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4586a68730bd2f2b04a83e83f79d271d8ed13763f64b75920f18a3a677b9a7f0"}, + {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fe4eb0e7f50cdb99b26250d9328faef30b1175a5dbcfd6d0578d18456bac567"}, + {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2a8a6bc19818ac3e5596310ace5aa50d918e1ebdcc204dc96e2f4d505d51740c"}, + {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f27eec42f6c3c1df09cfc1f6786308f8b525b8efaaf6d6bd76c1f52c6511f6a"}, + {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:2a4a13dfbb23977a51853b419141cd0a9b9573ab8d3a1455c6e63561387b52ff"}, + {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:02876bf2f69b062584965507b07bc06903c2dc93c57a554b64e012d636952654"}, + {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b992778d95b60a21c4d8d4a5f15aaab2bd3c3e16466a72d7f9bfd86e8cea0d4b"}, + {file = "aiohttp-3.11.13-cp313-cp313-win32.whl", hash = "sha256:507ab05d90586dacb4f26a001c3abf912eb719d05635cbfad930bdbeb469b36c"}, + {file = "aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2"}, + {file = "aiohttp-3.11.13-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:51c3ff9c7a25f3cad5c09d9aacbc5aefb9267167c4652c1eb737989b554fe278"}, + {file = "aiohttp-3.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e271beb2b1dabec5cd84eb488bdabf9758d22ad13471e9c356be07ad139b3012"}, + {file = "aiohttp-3.11.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e9eb7e5764abcb49f0e2bd8f5731849b8728efbf26d0cac8e81384c95acec3f"}, + {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baae005092e3f200de02699314ac8933ec20abf998ec0be39448f6605bce93df"}, + {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1982c98ac62c132d2b773d50e2fcc941eb0b8bad3ec078ce7e7877c4d5a2dce7"}, + {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2b25b2eeb35707113b2d570cadc7c612a57f1c5d3e7bb2b13870fe284e08fc0"}, + {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b27961d65639128336b7a7c3f0046dcc62a9443d5ef962e3c84170ac620cec47"}, + {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01fe9f1e05025eacdd97590895e2737b9f851d0eb2e017ae9574d9a4f0b6252"}, + {file = 
"aiohttp-3.11.13-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa1fb1b61881c8405829c50e9cc5c875bfdbf685edf57a76817dfb50643e4a1a"}, + {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:25de43bb3cf83ad83efc8295af7310219af6dbe4c543c2e74988d8e9c8a2a917"}, + {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe7065e2215e4bba63dc00db9ae654c1ba3950a5fff691475a32f511142fcddb"}, + {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7836587eef675a17d835ec3d98a8c9acdbeb2c1d72b0556f0edf4e855a25e9c1"}, + {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:85fa0b18558eb1427090912bd456a01f71edab0872f4e0f9e4285571941e4090"}, + {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a86dc177eb4c286c19d1823ac296299f59ed8106c9536d2b559f65836e0fb2c6"}, + {file = "aiohttp-3.11.13-cp39-cp39-win32.whl", hash = "sha256:684eea71ab6e8ade86b9021bb62af4bf0881f6be4e926b6b5455de74e420783a"}, + {file = "aiohttp-3.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:82c249f2bfa5ecbe4a1a7902c81c0fba52ed9ebd0176ab3047395d02ad96cfcb"}, + {file = "aiohttp-3.11.13.tar.gz", hash = "sha256:8ce789231404ca8fff7f693cdce398abf6d90fd5dae2b1847477196c243b1fbb"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" @@ -129,6 +251,8 @@ version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < 
\"3.10\"" files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, @@ -138,39 +262,83 @@ files = [ frozenlist = ">=1.1.0" [[package]] -name = "alembic" -version = "1.13.3" -description = "A database migration tool for SQLAlchemy." +name = "aiosignal" +version = "1.3.2" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, - {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + 
+[[package]] +name = "anyio" +version = "4.5.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, + {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -tz = ["backports.zoneinfo"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] +trio = ["trio (>=0.26.1)"] [[package]] -name = "aniso8601" -version = "10.0.0" -description = "A library for parsing ISO 8601 strings." 
+name = "anyio" +version = "4.8.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = "*" +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" files = [ - {file = "aniso8601-10.0.0-py2.py3-none-any.whl", hash = "sha256:3c943422efaa0229ebd2b0d7d223effb5e7c89e24d2267ebe76c61a2d8e290cb"}, - {file = "aniso8601-10.0.0.tar.gz", hash = "sha256:ff1d0fc2346688c62c0151547136ac30e322896ed8af316ef7602c47da9426cf"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + [package.extras] -dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] [[package]] name = "async-timeout" @@ -178,6 +346,8 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_full_version <= \"3.11.2\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = 
"sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -189,163 +359,192 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and 
python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version < \"3.9\"" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] + +[package.extras] +tzdata = ["tzdata"] [[package]] name = "bitarray" -version = "3.0.0" +version = "3.1.0" description = "efficient arrays of booleans -- C extension" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "bitarray-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ddbf71a97ad1d6252e6e93d2d703b624d0a5b77c153b12f9ea87d83e1250e0c"}, - {file = "bitarray-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:e0e7f24a0b01e6e6a0191c50b06ca8edfdec1988d9d2b264d669d2487f4f4680"}, - {file = "bitarray-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:150b7b29c36d9f1a24779aea723fdfc73d1c1c161dc0ea14990da27d4e947092"}, - {file = "bitarray-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8330912be6cb8e2fbfe8eb69f82dee139d605730cadf8d50882103af9ac83bb4"}, - {file = "bitarray-3.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e56ba8be5f17dee0ffa6d6ce85251e062ded2faa3cbd2558659c671e6c3bf96d"}, - {file = "bitarray-3.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd94b4803811c738e504a4b499fb2f848b2f7412d71e6b517508217c1d7929d"}, - {file = "bitarray-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0255bd05ec7165e512c115423a5255a3f301417973d20a80fc5bfc3f3640bcb"}, - {file = "bitarray-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe606e728842389943a939258809dc5db2de831b1d2e0118515059e87f7bbc1a"}, - {file = "bitarray-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e89ea59a3ed86a6eb150d016ed28b1bedf892802d0ed32b5659d3199440f3ced"}, - {file = "bitarray-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cf0cc2e91dd38122dec2e6541efa99aafb0a62e118179218181eff720b4b8153"}, - {file = "bitarray-3.0.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2d9fe3ee51afeb909b68f97e14c6539ace3f4faa99b21012e610bbe7315c388d"}, - {file = "bitarray-3.0.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:37be5482b9df3105bad00fdf7dc65244e449b130867c3879c9db1db7d72e508b"}, - {file = "bitarray-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0027b8f3bb2bba914c79115e96a59b9924aafa1a578223a7c4f0a7242d349842"}, - {file = "bitarray-3.0.0-cp310-cp310-win32.whl", hash = "sha256:628f93e9c2c23930bd1cfe21c634d6c84ec30f45f23e69aefe1fcd262186d7bb"}, - {file = 
"bitarray-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:0b655c3110e315219e266b2732609fddb0857bc69593de29f3c2ba74b7d3f51a"}, - {file = "bitarray-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:44c3e78b60070389b824d5a654afa1c893df723153c81904088d4922c3cfb6ac"}, - {file = "bitarray-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:545d36332de81e4742a845a80df89530ff193213a50b4cbef937ed5a44c0e5e5"}, - {file = "bitarray-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9eb510cde3fa78c2e302bece510bf5ed494ec40e6b082dec753d6e22d5d1b1"}, - {file = "bitarray-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e3727ab63dfb6bde00b281934e2212bb7529ea3006c0031a556a84d2268bea5"}, - {file = "bitarray-3.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2055206ed653bee0b56628f6a4d248d53e5660228d355bbec0014bdfa27050ae"}, - {file = "bitarray-3.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:147542299f458bdb177f798726e5f7d39ab8491de4182c3c6d9885ed275a3c2b"}, - {file = "bitarray-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f761184b93092077c7f6b7dad7bd4e671c1620404a76620da7872ceb576a94"}, - {file = "bitarray-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e008b7b4ce6c7f7a54b250c45c28d4243cc2a3bbfd5298fa7dac92afda229842"}, - {file = "bitarray-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dfea514e665af278b2e1d4deb542de1cd4f77413bee83dd15ae16175976ea8d5"}, - {file = "bitarray-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:66d6134b7bb737b88f1d16478ad0927c571387f6054f4afa5557825a4c1b78e2"}, - {file = "bitarray-3.0.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3cd565253889940b4ec4768d24f101d9fe111cad4606fdb203ea16f9797cf9ed"}, - {file = "bitarray-3.0.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:4800c91a14656789d2e67d9513359e23e8a534c8ee1482bb9b517a4cfc845200"}, - {file = "bitarray-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c2945e0390d1329c585c584c6b6d78be017d9c6a1288f9c92006fe907f69cc28"}, - {file = "bitarray-3.0.0-cp311-cp311-win32.whl", hash = "sha256:c23286abba0cb509733c6ce8f4013cd951672c332b2e184dbefbd7331cd234c8"}, - {file = "bitarray-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ca79f02a98cbda1472449d440592a2fe2ad96fe55515a0447fa8864a38017cf8"}, - {file = "bitarray-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:184972c96e1c7e691be60c3792ca1a51dd22b7f25d96ebea502fe3c9b554f25d"}, - {file = "bitarray-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:787db8da5e9e29be712f7a6bce153c7bc8697ccc2c38633e347bb9c82475d5c9"}, - {file = "bitarray-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2da91ab3633c66999c2a352f0ca9ae064f553e5fc0eca231d28e7e305b83e942"}, - {file = "bitarray-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7edb83089acbf2c86c8002b96599071931dc4ea5e1513e08306f6f7df879a48b"}, - {file = "bitarray-3.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996d1b83eb904589f40974538223eaed1ab0f62be8a5105c280b9bd849e685c4"}, - {file = "bitarray-3.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4817d73d995bd2b977d9cde6050be8d407791cf1f84c8047fa0bea88c1b815bc"}, - {file = "bitarray-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d47bc4ff9b0e1624d613563c6fa7b80aebe7863c56c3df5ab238bb7134e8755"}, - {file = "bitarray-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aca0a9cd376beaccd9f504961de83e776dd209c2de5a4c78dc87a78edf61839b"}, - {file = "bitarray-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:572a61fba7e3a710a8324771322fba8488d134034d349dcd036a7aef74723a80"}, - {file = 
"bitarray-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a817ad70c1aff217530576b4f037dd9b539eb2926603354fcac605d824082ad1"}, - {file = "bitarray-3.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2ac67b658fa5426503e9581a3fb44a26a3b346c1abd17105735f07db572195b3"}, - {file = "bitarray-3.0.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:12f19ede03e685c5c588ab5ed63167999295ffab5e1126c5fe97d12c0718c18f"}, - {file = "bitarray-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fcef31b062f756ba7eebcd7890c5d5de84b9d64ee877325257bcc9782288564a"}, - {file = "bitarray-3.0.0-cp312-cp312-win32.whl", hash = "sha256:656db7bdf1d81ec3b57b3cad7ec7276765964bcfd0eb81c5d1331f385298169c"}, - {file = "bitarray-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f785af6b7cb07a9b1e5db0dea9ef9e3e8bb3d74874a0a61303eab9c16acc1999"}, - {file = "bitarray-3.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7cb885c043000924554fe2124d13084c8fdae03aec52c4086915cd4cb87fe8be"}, - {file = "bitarray-3.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7814c9924a0b30ecd401f02f082d8697fc5a5be3f8d407efa6e34531ff3c306a"}, - {file = "bitarray-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bcf524a087b143ba736aebbb054bb399d49e77cf7c04ed24c728e411adc82bfa"}, - {file = "bitarray-3.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d5abf1d6d910599ac16afdd9a0ed3e24f3b46af57f3070cf2792f236f36e0b"}, - {file = "bitarray-3.0.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9929051feeaf8d948cc0b1c9ce57748079a941a1a15c89f6014edf18adaade84"}, - {file = "bitarray-3.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96cf0898f8060b2d3ae491762ae871b071212ded97ff9e1e3a5229e9fefe544c"}, - {file = "bitarray-3.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab37da66a8736ad5a75a58034180e92c41e864da0152b84e71fcc253a2f69cd4"}, - {file = 
"bitarray-3.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeb79e476d19b91fd6a3439853e4e5ba1b3b475920fa40d62bde719c8af786f"}, - {file = "bitarray-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f75fc0198c955d840b836059bd43e0993edbf119923029ca60c4fc017cefa54a"}, - {file = "bitarray-3.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f12cc7c7638074918cdcc7491aff897df921b092ffd877227892d2686e98f876"}, - {file = "bitarray-3.0.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dbe1084935b942fab206e609fa1ed3f46ad1f2612fb4833e177e9b2a5e006c96"}, - {file = "bitarray-3.0.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ac06dd72ee1e1b6e312504d06f75220b5894af1fb58f0c20643698f5122aea76"}, - {file = "bitarray-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:00f9a88c56e373009ac3c73c55205cfbd9683fbd247e2f9a64bae3da78795252"}, - {file = "bitarray-3.0.0-cp313-cp313-win32.whl", hash = "sha256:9c6e52005e91803eb4e08c0a08a481fb55ddce97f926bae1f6fa61b3396b5b61"}, - {file = "bitarray-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:cb98d5b6eac4b2cf2a5a69f60a9c499844b8bea207059e9fc45c752436e6bb49"}, - {file = "bitarray-3.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eb27c01b747649afd7e1c342961680893df6d8d81f832a6f04d8c8e03a8a54cc"}, - {file = "bitarray-3.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4683bff52f5a0fd523fb5d3138161ef87611e63968e1fcb6cf4b0c6a86970fe0"}, - {file = "bitarray-3.0.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb7302dbcfcb676f0b66f15891f091d0233c4fc23e1d4b9dc9b9e958156e347f"}, - {file = "bitarray-3.0.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:153d7c416a70951dcfa73487af05d2f49c632e95602f1620cd9a651fa2033695"}, - {file = "bitarray-3.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:251cd5bd47f542893b2b61860eded54f34920ea47fd5bff038d85e7a2f7ae99b"}, - {file = "bitarray-3.0.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fa4b4d9fa90124b33b251ef74e44e737021f253dc7a9174e1b39f097451f7ca"}, - {file = "bitarray-3.0.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:18abdce7ab5d2104437c39670821cba0b32fdb9b2da9e6d17a4ff295362bd9dc"}, - {file = "bitarray-3.0.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:2855cc01ee370f7e6e3ec97eebe44b1453c83fb35080313145e2c8c3c5243afb"}, - {file = "bitarray-3.0.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:0cecaf2981c9cd2054547f651537b4f4939f9fe225d3fc2b77324b597c124e40"}, - {file = "bitarray-3.0.0-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:22b00f65193fafb13aa644e16012c8b49e7d5cbb6bb72825105ff89aadaa01e3"}, - {file = "bitarray-3.0.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:20f30373f0af9cb583e4122348cefde93c82865dbcbccc4997108b3d575ece84"}, - {file = "bitarray-3.0.0-cp36-cp36m-win32.whl", hash = "sha256:aef404d5400d95c6ec86664df9924bde667c8865f8e33c9b7bd79823d53b3e5d"}, - {file = "bitarray-3.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:ec5b0f2d13da53e0975ac15ecbe8badb463bdb0bebaa09457f4df3320421915c"}, - {file = "bitarray-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:041c889e69c847b8a96346650e50f728b747ae176889199c49a3f31ae1de0e23"}, - {file = "bitarray-3.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc83ea003dd75e9ade3291ef0585577dd5524aec0c8c99305c0aaa2a7570d6db"}, - {file = "bitarray-3.0.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c33129b49196aa7965ac0f16fcde7b6ad8614b606caf01669a0277cef1afe1d"}, - {file = "bitarray-3.0.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ef5c787c8263c082a73219a69eb60a500e157a4ac69d1b8515ad836b0e71fb4"}, - {file = 
"bitarray-3.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e15c94d79810c5ab90ddf4d943f71f14332890417be896ca253f21fa3d78d2b1"}, - {file = "bitarray-3.0.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cd021ada988e73d649289cee00428b75564c46d55fbdcb0e3402e504b0ae5ea"}, - {file = "bitarray-3.0.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7f1c24be7519f16a47b7e2ad1a1ef73023d34d8cbe1a3a59b185fc14baabb132"}, - {file = "bitarray-3.0.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:000df24c183011b5d27c23d79970f49b6762e5bb5aacd25da9c3e9695c693222"}, - {file = "bitarray-3.0.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:42bf1b222c698b467097f58b9f59dc850dfa694dde4e08237407a6a103757aa3"}, - {file = "bitarray-3.0.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:648e7ce794928e8d11343b5da8ecc5b910af75a82ea1a4264d5d0a55c3785faa"}, - {file = "bitarray-3.0.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f536fc4d1a683025f9caef0bebeafd60384054579ffe0825bb9bd8c59f8c55b8"}, - {file = "bitarray-3.0.0-cp37-cp37m-win32.whl", hash = "sha256:a754c1464e7b946b1cac7300c582c6fba7d66e535cd1dab76d998ad285ac5a37"}, - {file = "bitarray-3.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e91d46d12781a14ccb8b284566b14933de4e3b29f8bc5e1c17de7a2001ad3b5b"}, - {file = "bitarray-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:904c1d5e3bd24f0c0d37a582d2461312033c91436a6a4f3bdeeceb4bea4a899d"}, - {file = "bitarray-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:47ccf9887bd595d4a0536f2310f0dcf89e17ab83b8befa7dc8727b8017120fda"}, - {file = "bitarray-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71ad0139c95c9acf4fb62e203b428f9906157b15eecf3f30dc10b55919225896"}, - {file = "bitarray-3.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e002ac1073ac70e323a7a4bfa9ab95e7e1a85c79160799e265563f342b1557"}, - {file = 
"bitarray-3.0.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acc07211a59e2f245e9a06f28fa374d094fb0e71cf5366eef52abbb826ddc81e"}, - {file = "bitarray-3.0.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98a4070ddafabddaee70b2aa7cc6286cf73c37984169ab03af1782da2351059a"}, - {file = "bitarray-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7d09ef06ba57bea646144c29764bf6b870fb3c5558ca098191e07b6a1d40bf7"}, - {file = "bitarray-3.0.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce249ed981f428a8b61538ca82d3875847733d579dd40084ab8246549160f8a4"}, - {file = "bitarray-3.0.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea40e98d751ed4b255db4a88fe8fb743374183f78470b9e9305aab186bf28ede"}, - {file = "bitarray-3.0.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:928b8b6dfcd015e1a81334cfdac02815da2a2407854492a80cf8a3a922b04052"}, - {file = "bitarray-3.0.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fbb645477595ce2a0fbb678d1cfd08d3b896e5d56196d40fb9e114eeab9382b3"}, - {file = "bitarray-3.0.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:dc1937a0ff2671797d35243db4b596329842480d125a65e9fe964bcffaf16dfc"}, - {file = "bitarray-3.0.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a4f49ac31734fe654a68e2515c0da7f5bbdf2d52755ba09a42ac406f1f08c9d0"}, - {file = "bitarray-3.0.0-cp38-cp38-win32.whl", hash = "sha256:6d2a2ce73f9897268f58857ad6893a1a6680c5a6b28f79d21c7d33285a5ae646"}, - {file = "bitarray-3.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:b1047999f1797c3ea7b7c85261649249c243308dcf3632840d076d18fa72f142"}, - {file = "bitarray-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:39b38a3d45dac39d528c87b700b81dfd5e8dc8e9e1a102503336310ef837c3fd"}, - {file = "bitarray-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0e104f9399144fab6a892d379ba1bb4275e56272eb465059beef52a77b4e5ce6"}, - {file = 
"bitarray-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0879f839ec8f079fa60c3255966c2e1aa7196699a234d4e5b7898fbc321901b5"}, - {file = "bitarray-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9502c2230d59a4ace2fddfd770dad8e8b414cbd99517e7e56c55c20997c28b8d"}, - {file = "bitarray-3.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57d5ef854f8ec434f2ffd9ddcefc25a10848393fe2976e2be2c8c773cf5fef42"}, - {file = "bitarray-3.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a3c36b2fcfebe15ad1c10a90c1d52a42bebe960adcbce340fef867203028fbe7"}, - {file = "bitarray-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a33a537e781eac3a352397ce6b07eedf3a8380ef4a804f8844f3f45e335544"}, - {file = "bitarray-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa54c7e1da8cf4be0aab941ea284ec64033ede5d6de3fd47d75e77cafe986e9d"}, - {file = "bitarray-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a667ea05ba1ea81b722682276dbef1d36990f8908cf51e570099fd505a89f931"}, - {file = "bitarray-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d756bfeb62ca4fe65d2af7a39249d442c05070c047d03729ad6cd4c2e9b0f0bd"}, - {file = "bitarray-3.0.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c9e9fef0754867d88e948ce8351c9fd7e507d8514e0f242fd67c907b9cdf98b3"}, - {file = "bitarray-3.0.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:67a0b56dd02f2713f6f52cacb3f251afd67c94c5f0748026d307d87a81a8e15c"}, - {file = "bitarray-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d8c36ddc1923bcc4c11b9994c54eaae25034812a42400b7b8a86fe6d242166a2"}, - {file = "bitarray-3.0.0-cp39-cp39-win32.whl", hash = "sha256:1414a7102a3c4986f241480544f5c99f5d32258fb9b85c9c04e84e48c490ab35"}, - {file = "bitarray-3.0.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:8c9733d2ff9b7838ac04bf1048baea153174753e6a47312be14c83c6a395424b"}, - {file = "bitarray-3.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fef4e3b3f2084b4dae3e5316b44cda72587dcc81f68b4eb2dbda1b8d15261b61"}, - {file = "bitarray-3.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e9eee03f187cef1e54a4545124109ee0afc84398628b4b32ebb4852b4a66393"}, - {file = "bitarray-3.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb5702dd667f4bb10fed056ffdc4ddaae8193a52cd74cb2cdb54e71f4ef2dd1"}, - {file = "bitarray-3.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:666e44b0458bb2894b64264a29f2cc7b5b2cbcc4c5e9cedfe1fdbde37a8e329a"}, - {file = "bitarray-3.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c756a92cf1c1abf01e56a4cc40cb89f0ff9147f2a0be5b557ec436a23ff464d8"}, - {file = "bitarray-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7e51e7f8289bf6bb631e1ef2a8f5e9ca287985ff518fe666abbdfdb6a848cb26"}, - {file = "bitarray-3.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fa5d8e4b28388b337face6ce4029be73585651a44866901513df44be9a491ab"}, - {file = "bitarray-3.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3963b80a68aedcd722a9978d261ae53cb9bb6a8129cc29790f0f10ce5aca287a"}, - {file = "bitarray-3.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b555006a7dea53f6bebc616a4d0249cecbf8f1fadf77860120a2e5dbdc2f167"}, - {file = "bitarray-3.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:4ac2027ca650a7302864ed2528220d6cc6921501b383e9917afc7a2424a1e36d"}, - {file = "bitarray-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bf90aba4cff9e72e24ecdefe33bad608f147a23fa5c97790a5bab0e72fe62b6d"}, - {file = 
"bitarray-3.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a199e6d7c3bad5ba9d0e4dc00dde70ee7d111c9dfc521247fa646ef59fa57e"}, - {file = "bitarray-3.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43b6c7c4f4a7b80e86e24a76f4c6b9b67d03229ea16d7d403520616535c32196"}, - {file = "bitarray-3.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34fc13da3518f14825b239374734fce93c1a9299ed7b558c3ec1d659ec7e4c70"}, - {file = "bitarray-3.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:369b6d457af94af901d632c7e625ca6caf0a7484110fc91c6290ce26bc4f1478"}, - {file = "bitarray-3.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ee040ad3b7dfa05e459713099f16373c1f2a6f68b43cb0575a66718e7a5daef4"}, - {file = "bitarray-3.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dad7ba2af80f9ec1dd988c3aca7992408ec0d0b4c215b65d353d95ab0070b10"}, - {file = "bitarray-3.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4839d3b64af51e4b8bb4a602563b98b9faeb34fd6c00ed23d7834e40a9d080fc"}, - {file = "bitarray-3.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f71f24b58e75a889b9915e3197865302467f13e7390efdea5b6afc7424b3a2ea"}, - {file = "bitarray-3.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:bcf0150ae0bcc4aa97bdfcb231b37bad1a59083c1b5012643b266012bf420e68"}, - {file = "bitarray-3.0.0.tar.gz", hash = "sha256:a2083dc20f0d828a7cdf7a16b20dae56aab0f43dc4f347a3b3039f6577992b03"}, + {file = "bitarray-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1fad6456993f604726dcb21d1f003430988d5138f858d79e5b8682f56dfad6"}, + {file = "bitarray-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0dc0f28340929fffa17fbd3eca5bfae5c1827f3000c0bd9312999d8c5b1464a0"}, + {file = 
"bitarray-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84de57891a80944e259956fd72bd542290b76c063182f0e85ce5380c49cfcfb6"}, + {file = "bitarray-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b3f4be402e7c611a0a7048f3d042b9e4d697ca6f721d08098118d1ae8bb8a69c"}, + {file = "bitarray-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eabaee3c7d411f6ed48f92752e61a409530a0ebf65498bd408432800a804e0b8"}, + {file = "bitarray-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cb6c270c49d6779af97586a955977493146626ae34710a4c1ec84cb0115f4d4"}, + {file = "bitarray-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:481a00d710c7811b38563f6fe22da20be2e6f722196f68873e12d23f4fdf82c1"}, + {file = "bitarray-3.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b81bbefe9d205af66a15a3097f8b66f29fe767fa9cb5dd5a7881f53d2505c2d5"}, + {file = "bitarray-3.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:506df7b12e1380f56cfa20b9c203518afe585f0b86e850a96e3691cf0175e4c0"}, + {file = "bitarray-3.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:43ec740770723de6c99f0a858807ef905664f31cf254e0b803f3dd2797537d7a"}, + {file = "bitarray-3.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4deac3da54c3df136d6615a2410d5a6170353b4d08142273266296c376fb1889"}, + {file = "bitarray-3.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c58cbd0cc9f6347fffa36a6870fac695cd6e98c39fcfd8002d44a70fba03cd8c"}, + {file = "bitarray-3.1.0-cp310-cp310-win32.whl", hash = "sha256:e092a5c4cc9ae6bc757bc1fee8894cd0275f0a0689c8f57555b00e198cdbcecc"}, + {file = "bitarray-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:453395201790f16b22092c25bed6ecf3bafee7f36db840d4cfc07cd9b6f9628f"}, + {file = "bitarray-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a1cb97bdc51e3db8def039607ca42d12c6e55fb134a8d211cca2fcf0a7b3a986"}, + {file = "bitarray-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:92d8130efa6bebd4903b70f6d98832aabbd78c1031bc64c7c9d4e39b9db3bfa6"}, + {file = "bitarray-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a9559ab050618902f8b8055c5b0b78dfc2a7974656fddee26b033ec56945f7"}, + {file = "bitarray-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2c57a4e8f0d9eb5cef7c83c8a4fd8bf06c35d641fc3cb24bf9511adb54e2d8a"}, + {file = "bitarray-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9bcaa1764d0d302050a31b0f6879ea20e3900b2e73b4ea647ab570950c2062b"}, + {file = "bitarray-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:769184a57abfdfc24c37bb5d0f3836ffe65c7c5cdbeec431766520cd246a27c6"}, + {file = "bitarray-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f247387bb58b8c626088be2fa4ca5869fb12638b45acd29fab21aee48529560"}, + {file = "bitarray-3.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1303604e4069699cef3e969f31d6c3e8e5a0bc637569d90b3647469eff91dc7b"}, + {file = "bitarray-3.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f238c2eb7a0a6a50537e5d182edb629901af94746854108483f0b11b0e3d1a78"}, + {file = "bitarray-3.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e43a96b9b63348d9e5cca70fc31b5d0f07a420a70ef89b376aa4148b87d24b39"}, + {file = "bitarray-3.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5255fb9e4e4879adea5fb61cbde347767d346e29822a51ca21816f28340f4cdb"}, + {file = "bitarray-3.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:68bab51a827abc6f037ea76bb374ec28804a47b51716f9eb3f7da3d754bc8431"}, + {file = "bitarray-3.1.0-cp311-cp311-win32.whl", hash = "sha256:7ff061c714ff62357ff3be23d96c73944ad9181b8b21d49c29ed82fb2fb0efa4"}, + {file = 
"bitarray-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2f8aed62de760be852e0dee0ad507e0fe17fc12f83b36cc0bfe4900e0a0e915"}, + {file = "bitarray-3.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4c298fefda9dfe2ef24aee8fe48acc3a1a063b266791a814330c6932248785c4"}, + {file = "bitarray-3.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9211fb3a109ba76cb8bd0f76645d256f8a46e733693de22d0d7857941603cee"}, + {file = "bitarray-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b6bfe0a618301f0afed9004b8230a413ad399b58fef50a744a623461b96d8e3"}, + {file = "bitarray-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2db21d733e06d0d5b86fd3c848a84b8930e71250542aa7602e2c048b96fce163"}, + {file = "bitarray-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3807c38b37a500583eafd4ae866d2a051834acd4e11cd8557af9cf5379fa7e21"}, + {file = "bitarray-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcd15f43e846bfdaac8e5420006eba2d6adce29b6336492cf9d651e4a313ea86"}, + {file = "bitarray-3.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2266e15f02d3192d1c2d29a790a4eccb230e23de0de6136815b32f9cdd5b4d"}, + {file = "bitarray-3.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54a6a8525bc66228ed9cc69d562979319ef151dba2ed302bf7a21c7e124c137b"}, + {file = "bitarray-3.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ffb6b3a6efdabe6a4f3042b16a68499711a7835e950847a5643f60a43de3335f"}, + {file = "bitarray-3.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:430302c3ca7ed0dbb629db87e24a3be082851d71777787d4c8ad424624363780"}, + {file = "bitarray-3.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d4277999453fa63e96ed8a31698c9ac64ee5f38bd5c63205d69d9b73780fa152"}, + {file = "bitarray-3.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:f8fab5053ff281954718dc56c44ee7d1462ccc7e296b84da504b72b01c6c0c41"}, + {file = "bitarray-3.1.0-cp312-cp312-win32.whl", hash = "sha256:598072dbe456cc57270c6f34fa6b9aafd56bb5291e0d14f8509c1522f6d77f32"}, + {file = "bitarray-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:4fed8491a77f9fa5303bf6a24a76fcafbf1b496bb22d72b213b673ef8ce6e201"}, + {file = "bitarray-3.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ea4a888e45fdabc89705837b10030a990ec4879d14fccc3b9450ab56ca9d7ec"}, + {file = "bitarray-3.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2695c23b9857105631e9d4f8ebbcc8a4e55c6c9f31bc80d9e5e018e377d650d8"}, + {file = "bitarray-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:577a1ffc6318cdcbf91e8492596004bb1572ce3c703aa42c16c8ac1188625b8d"}, + {file = "bitarray-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1221d5d1255a74397f938ff822d8df4b0cc416b74e5659d21c3fe5f3d06590a1"}, + {file = "bitarray-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91e9309912700ab085cf78d162ad0bf0d3de35e5e084868400672b4ce3befbb0"}, + {file = "bitarray-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c319f77118bac6aded58a0d2f172ac1b06e00bfa5391033848864e39bfd771"}, + {file = "bitarray-3.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08004c22af11a234a73a8edf744524fb67e89ab4c80791f647c522687ddcb4b9"}, + {file = "bitarray-3.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afb4d8876fff11cced4377f055498fd3c179e6e2bf7165fd055dabc494005252"}, + {file = "bitarray-3.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7c228c78bd20f2a46a2f3f2e349292c604ae26210dad56afe4b64ea65f72db75"}, + {file = "bitarray-3.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:139115998ffcffd8b883e1f21ced6f1d96947c5ace51f9656ceccedea5765557"}, + {file = 
"bitarray-3.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7e3ade382c9a5d5d77635041dd2aaa4e59326c8f62e1fb33ce28ddaeaa6ae7b6"}, + {file = "bitarray-3.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5efdb6403d206068d434caf9deb7ab936732c06e5617a63e213c94ac750303f4"}, + {file = "bitarray-3.1.0-cp313-cp313-win32.whl", hash = "sha256:69721fef0427a7f56ed7eef16ff9ddec96c64cee4d7abb5e7511c9aedebafc0f"}, + {file = "bitarray-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:34d75765b5c558e7bffa247493412831c6edb5c40e639a6ac94999bc6f40a6c8"}, + {file = "bitarray-3.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6b85d01d8cf30937d9f55d2029a0f7575e9b559d606e6fff17bb79c2830e0afb"}, + {file = "bitarray-3.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acdac94fa2853e74503248b4079a30f3c09ab993f1b2a393fd88071824bdb0de"}, + {file = "bitarray-3.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f851a69521240418fc1ab6b313ef5ef6533e451a46184077b1f5e410441c7d9a"}, + {file = "bitarray-3.1.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf6be47880210416d3cd2405fd9a5f28b8fd1896b77acf729024f64b4ecd2676"}, + {file = "bitarray-3.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf4d0759d431953324de2ab251b954123bd4dfa83c990370814a88306f1ffc53"}, + {file = "bitarray-3.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0e1e3836468b62c079a69dbbfcdb8f0abb8dc8540f5cfca48147a8178cc95ea"}, + {file = "bitarray-3.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:0b380164b3b2ff5aa833ee2cf00855645308f14694546ca02beca6b9069e3c75"}, + {file = "bitarray-3.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:de8ce30fb9e9d2fa063abf8f9443d7bdf77b5de52a5cffbfd007f6150e6bee96"}, + {file = "bitarray-3.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = 
"sha256:72b0f7cbc5c0bf008fde06b7777474a942cc0aa9c743dafc583a1dc430a07122"}, + {file = "bitarray-3.1.0-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:ccd384e435d9c540c7e54c9149abbf462691e8a6c9287448d44da9f22230e09b"}, + {file = "bitarray-3.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:37d8da058472b2fedd27903781d5ea27264cde11dcb81a3237724427de83f241"}, + {file = "bitarray-3.1.0-cp36-cp36m-win32.whl", hash = "sha256:f78e2a4fd88b6fa383f44ca866cf6b5e1c0370de7ace7a0b4af7b3f11d806558"}, + {file = "bitarray-3.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a394cfcd6d9f7fac71429df8e8c4b93b1f0db3ef7a06eb84d9f69d901b862ee4"}, + {file = "bitarray-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:756da43ac44b1cbf576a02b3e9710fa24f14823b38df95868ea8a3fea840c725"}, + {file = "bitarray-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:638026c90fb135f0014a47bc5e9d5ed8acff8bbc3283f05e811e9f84a1f32a0e"}, + {file = "bitarray-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f90b57ea9aa9c6159c4a9110e8f787194583ede08b6ff5a6ea36c17b8973b75"}, + {file = "bitarray-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93cb9b8e2cda9006d489bfa8e7f4fe5680860b6ff45fa9081a6554a9e661fa97"}, + {file = "bitarray-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2225ce0383a2e6950865f178513d8939c89782e909cbcfe4dd7c4f572b1e5a1"}, + {file = "bitarray-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf22a95774a3d04200605bef8f436867df3ec87a13033fc482cce7ce17f62644"}, + {file = "bitarray-3.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:226297bb0a9f6636c3b74cfa39a1feec7fbd26c6b59fd9d5b78c4066a87aa571"}, + {file = "bitarray-3.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f4c28344774fadb554489f3a8c7a9c75237a85996a8a6d5d91ffa781bdd7b1ff"}, + {file = 
"bitarray-3.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:ce05239a8422ca3ebb1f00cf3577a1d255f07f7dd4c2ab20e8e009da393b86f3"}, + {file = "bitarray-3.1.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:9ce0f80904644ddbdad42eb1eb3945c19bc274b8b0e0844c7f7b9effafd279d0"}, + {file = "bitarray-3.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:c467bf00939b162144862d167dd55a651e44c256805f25e44edbd547fc0b0922"}, + {file = "bitarray-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:90b714bc59a9dd11d1bba0d68dd395a3e51f275c8547e9b68af561763a61196e"}, + {file = "bitarray-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4cbbe82edbe2f93f2598042d208caa87969025537ddce34cf647e4da6c4f6af4"}, + {file = "bitarray-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:34dd123c69a7f8520c20b45351df1d169429b9e636425c3c8278fdde80151fe6"}, + {file = "bitarray-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:03ae745110d4e343d78f5c3836058d510222b17469b1eb64362cdba7cbdfa26a"}, + {file = "bitarray-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfef13ba01363e185f3c7ffd8dc84c109a2be7da4d035b289c8f8a94b02c8183"}, + {file = "bitarray-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:798a71af5e1cded785dabc2f03bf65cd021a7e0ea54d8874a49cbb64669694e2"}, + {file = "bitarray-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d5eb9d2389c76469c4f10d636a79bcd13142d863a5d07a962738ae55fa60dfa"}, + {file = "bitarray-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a0a9c6a9fe679cd48204c719329de0ce72dede6f8e862744adcaf85b1d0c4c6"}, + {file = "bitarray-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97cb1a6cf3538ecda5ce537111f246f468919ca38ff6cfe188f99ee3353c8f2"}, + {file = "bitarray-3.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:6d49b0748e9aa0b3bcec4074675405d6a1b19288fb0e8ad8bc6db95c66840747"}, + {file = "bitarray-3.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b57959f7538b93cb058aa8c467cfd8b82c0af88a2168a4f382ff77e3a90274ee"}, + {file = "bitarray-3.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:752e2de8b0094d56a6e98516609f4d2b9a2027d1a9038de3f021a7d1d13d0599"}, + {file = "bitarray-3.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5b90b46cc73fc2db871985ae93132c7945f264aea6000348a124ba84b48af980"}, + {file = "bitarray-3.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:491d5c5b34b00c971be98b36736e090c847b0e2904a1b097a32bb02226fb11ba"}, + {file = "bitarray-3.1.0-cp38-cp38-win32.whl", hash = "sha256:515d954219d9b81b6442ab270cb8142100f32eac06f00f0c588da6c1416c9a1f"}, + {file = "bitarray-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:40c3f234b515aa8cfce0adc1df2b9c3b45e4307f8b756c9f3069cbc3effb5acb"}, + {file = "bitarray-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6fdd73f8f563b1f0d106c09949bd90b1104464b5cd148c02b7dae4e3efdb1dcd"}, + {file = "bitarray-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c609504ce985754867a91db78fb1d5110df589447233badb5a1d453fb2e1714"}, + {file = "bitarray-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff398a6850da105e5cf13e4297f1bd181d90c16a4db9ce20e0608e1edf9c81b0"}, + {file = "bitarray-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39f32b9c1b03f2c2cec002513ef91a9818cf85e8da94069e430d71256c0b858c"}, + {file = "bitarray-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d6e940dfefe604ecbd77849b107319c73f51dfb24ae18c80710dad9bdb59244"}, + {file = "bitarray-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19c3a7f80b286bd4df0fcbddbe97fd8b48028c0497678ee1f0bab820978295bd"}, + {file = 
"bitarray-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47b6cbbecd24d3b943cef3e704dbf9b510dec310251e1c279d10105cb40a33d8"}, + {file = "bitarray-3.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ab0063f8264cae001fc24238bf8b90a424be25b952951b749b647fd615011428"}, + {file = "bitarray-3.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:86544ffdf8d0cb7271acb5c3a560f4c7850f4c11057cc9c9acdadbea125aa68a"}, + {file = "bitarray-3.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:56ce6219922715fa8f9c65a40a504d4a5e64504ed7c4f3fac64071117559ef33"}, + {file = "bitarray-3.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:56fac415c19689b327f22ca6c02ffa908009f21aab931230d292d5239de40094"}, + {file = "bitarray-3.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e47bee13f43dfc5c5c3661e70b350a5dd37f62f25e196c9a20f504ca02774ac6"}, + {file = "bitarray-3.1.0-cp39-cp39-win32.whl", hash = "sha256:df4ea0d3035298716550b20396d831bc5871efbba7f9cc8e84eabd0906c0163e"}, + {file = "bitarray-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7c96ecf342bf6093523477cf1eacd958df5206564aef347d3d2d8d4541a1c6a0"}, + {file = "bitarray-3.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c0790bbbe08b141d6a93061e68d1954c86374ca4982e5586becdf7dc0d61e95c"}, + {file = "bitarray-3.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dde6268797d6e3f0639b1a5048eee5e42aa63fd00c4477741f9a5b1720f1aa8f"}, + {file = "bitarray-3.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea7791224248fe8ad6f5533877f94850b5636cf957f245a2f4854da21d0be766"}, + {file = "bitarray-3.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7319e68ad1cb081dd7d1b2fe1242c12fe8f9ee3c78812db010b18e9753f7bb"}, + {file = "bitarray-3.1.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3aab1395cd4a23b1815f87120e59e1a66a5f3e92cbe42e4653b9fae1320e0eeb"}, + {file = "bitarray-3.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:729ede2a88c0b379080606f2dd6ff4b4fc1798683f1fb244d79e929b298784c4"}, + {file = "bitarray-3.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fe0998dfa82776c17c262b20c2ff735f310aed4c85dcc2614ed389815a6bb4ef"}, + {file = "bitarray-3.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19f7b08cabfeed0190fbfcf7aad2cd71bb6934f319bd9f195b138616ebc56edf"}, + {file = "bitarray-3.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba0aede899cd78952977db93cf24f5845163efbe85e4220de95298976d3c54b"}, + {file = "bitarray-3.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6370b01839750fcb3aea8639344fb3898f73dac42abe344bc5e39669b93fe110"}, + {file = "bitarray-3.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:37901e3b417ae482858c29be7b963ef1e6cf0ac16f03f4852295aee1aa77ac18"}, + {file = "bitarray-3.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7eb06912a72f5728864808a68abc1e247bc24c9983a1f3e400f9954245715fac"}, + {file = "bitarray-3.1.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b9fe9c40c13c0c796bee8d6416185c59f270eb9d22dfb7f6b1cadd0d8be3b4c8"}, + {file = "bitarray-3.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44b93406e302f1077c9b4c4b990a40a7afbebab66e4579dce2b06d7f55d8e7c7"}, + {file = "bitarray-3.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:984803aa15421f3bade7c05fd44ab6acdcebe16b646f3201f5ccf1a111ba7661"}, + {file = "bitarray-3.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d39a36b6b95889a4cbc1d3083053cebe3315c13ff9a8760c14a3fe9cd8a102b1"}, + {file = "bitarray-3.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:41282599c51e26bc491a3d1471a785a0158c529a2e99757a353a708a9df66655"}, + {file = "bitarray-3.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4653e4cab6001a97c6689a4bfe3517128a74656eaa5df22f6ce01fd9b264db30"}, + {file = "bitarray-3.1.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d6ef7906263c12a27361ec06e89180a01f8ac57ffa0e1cdf7953948505fea4e"}, + {file = "bitarray-3.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868e0a4fc33333b9cfaa9a0c6b847ce64e7d2064162c2183608244a649487d0a"}, + {file = "bitarray-3.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c64123ae24065478a17d2c6ed08e7e9ea6131be054326a01c9b13f5e36ac1a95"}, + {file = "bitarray-3.1.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dcceb74c288777e86dbf2497e0bfc444aecf1e710e8a030ea4dba13600ac066"}, + {file = "bitarray-3.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:244897669375d345fe67cd5c0701ec5ea81991a9193fa8cfa4bcb55277375eff"}, + {file = "bitarray-3.1.0.tar.gz", hash = "sha256:71757171a45eac58782861c49137ba3bed0da489155311857f69f4e9baf81fa4"}, ] [[package]] @@ -354,6 +553,7 @@ version = "21.12b0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.6.2" +groups = ["dev"] files = [ {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, @@ -366,7 +566,7 @@ pathspec = ">=0.9.0,<1" platformdirs = ">=2" tomli = ">=0.2.6,<2.0.0" typing-extensions = [ - {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, + {version = ">=3.10.0.0"}, {version = ">=3.10.0.0,<3.10.0.1 || >3.10.0.1", markers = "python_version >= \"3.10\""}, ] @@ -383,31 +583,24 @@ version = "1.8.2" description = "Fast, simple object-to-object and broadcast signaling" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] [[package]] -name = "cached-property" -version = "2.0.1" -description = "A decorator for caching properties in classes." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cached_property-2.0.1-py3-none-any.whl", hash = "sha256:f617d70ab1100b7bcf6e42228f9ddcb78c676ffa167278d9f730d1c2fba69ccb"}, - {file = "cached_property-2.0.1.tar.gz", hash = "sha256:484d617105e3ee0e4f1f58725e72a8ef9e93deee462222dbd51cd91230897641"}, -] - -[[package]] -name = "cachelib" -version = "0.9.0" -description = "A collection of cache libraries in the same API interface." 
+name = "blinker" +version = "1.9.0" +description = "Fast, simple object-to-object and broadcast signaling" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" files = [ - {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, - {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, + {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, + {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, ] [[package]] @@ -416,6 +609,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -427,6 +621,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -524,96 +719,106 @@ files = [ [[package]] name = "ckzg" -version = "1.0.2" +version = "2.0.1" description = "Python bindings for C-KZG-4844" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "ckzg-1.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdd082bc0f2a595e3546658ecbe1ff78fe65b0ab7e619a8197a62d94f46b5b46"}, - {file = "ckzg-1.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50ca4af4e2f1a1e8b0a7e97b3aef39dedbb0d52d90866ece424f13f8df1b5972"}, - {file = "ckzg-1.0.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e9dc671b0a307ea65d0a216ca496c272dd3c1ed890ddc2a306da49b0d8ffc83"}, - {file = "ckzg-1.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d95e97a0d0f7758119bb905fb5688222b1556de465035614883c42fe4a047d1f"}, - {file = "ckzg-1.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27261672154cbd477d84d289845b0022fbdbe2ba45b7a2a2051c345fa04c8334"}, - {file = "ckzg-1.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c16d5ee1ddbbbad0367ff970b3ec9f6d1879e9f928023beda59ae9e16ad99e4c"}, - {file = "ckzg-1.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:09043738b029bdf4fdc82041b395cfc6f5b5cf63435e5d4d685d24fd14c834d3"}, - {file = "ckzg-1.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3c0afa232d2312e3101aaddb6971b486b0038a0f9171500bc23143f5749eff55"}, - {file = "ckzg-1.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:96e8281b6d58cf91b9559e1bd38132161d63467500838753364c68e825df2e2c"}, - {file = "ckzg-1.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b874167de1d6de72890a2ad5bd9aa7adbddc41c3409923b59cf4ef27f83f79da"}, - {file = "ckzg-1.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d2ccd68b0743e20e853e31a08da490a8d38c7f12b9a0c4ee63ef5afa0dc2427"}, - {file = "ckzg-1.0.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e8d534ddbe785c44cf1cd62ee32d78b4310d66dd70e42851f5468af655b81f5"}, - {file = "ckzg-1.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c732cda00c76b326f39ae97edfc6773dd231b7c77288b38282584a7aee77c3a7"}, - {file = "ckzg-1.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc5a27284db479ead4c053ff086d6e222914f1b0aa08b80eabfa116dbed4f7a"}, - {file = "ckzg-1.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6bd5006cb3e802744309450183087a6594d50554814eee19065f7064dff7b05"}, - {file = "ckzg-1.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3594470134eda7adf2813ad3f1da55ced98c8a393262f47ce3890c5afa05b23e"}, - {file = "ckzg-1.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fea56f39e48b60c1ff6f751c47489e353d1bd95cae65c429cf5f87735d794431"}, - {file = "ckzg-1.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:f769eb2e1056ca396462460079f6849c778f58884bb24b638ff7028dd2120b65"}, - {file = "ckzg-1.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e3cb2f8c767aee57e88944f90848e8689ce43993b9ff21589cfb97a562208fe7"}, - {file = "ckzg-1.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b29889f5bc5db530f766871c0ff4133e7270ecf63aaa3ca756d3b2731980802"}, - {file = "ckzg-1.0.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfcc70fb76b3d36125d646110d5001f2aa89c1c09ff5537a4550cdb7951f44d4"}, - {file = 
"ckzg-1.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ca8a256cdd56d06bc5ef24caac64845240dbabca402c5a1966d519b2514b4ec"}, - {file = "ckzg-1.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ea91b0236384f93ad1df01d530672f09e254bd8c3cf097ebf486aebb97f6c8c"}, - {file = "ckzg-1.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:65311e72780105f239d1d66512629a9f468b7c9f2609b8567fc68963ac638ef9"}, - {file = "ckzg-1.0.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0d7600ce7a73ac41d348712d0c1fe5e4cb6caa329377064cfa3a6fd8fbffb410"}, - {file = "ckzg-1.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:19893ee7bd7da8688382cb134cb9ee7bce5c38e3a9386e3ed99bb010487d2d17"}, - {file = "ckzg-1.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:c3e1a9a72695e777497e95bb2213316a1138f82d1bb5d67b9c029a522d24908e"}, - {file = "ckzg-1.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2f59da9cb82b6a4be615f2561a255731eededa7ecd6ba4b2f2dedfc918ef137"}, - {file = "ckzg-1.0.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c915e1f2ef51657c3255d8b1e2aea6e0b93348ae316b2b79eaadfb17ad8f514e"}, - {file = "ckzg-1.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcc0d2031fcabc4be37e9e602c926ef9347238d2f58c1b07e0c147f60b9e760b"}, - {file = "ckzg-1.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cdaad2745425d7708e76e8e56a52fdaf5c5cc1cfefd5129d24ff8dbe06a012d"}, - {file = "ckzg-1.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1ec775649daade1b93041aac9c1660c2ad9828b57ccd2eeb5a3074d8f05e544a"}, - {file = "ckzg-1.0.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:02f9cc3e38b3702ec5895a1ebf927fd02b8f5c2f93c7cb9e438581b5b74472c8"}, - {file = "ckzg-1.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0e816af31951b5e94e6bc069f21fe783427c190526e0437e16c4488a34ddcacc"}, - {file = 
"ckzg-1.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:651ba33ee2d7fefff14ca519a72996b733402f8b043fbfef12d5fe2a442d86d8"}, - {file = "ckzg-1.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:489763ad92e2175fb6ab455411f03ec104c630470d483e11578bf2e00608f283"}, - {file = "ckzg-1.0.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69e1376284e9a5094d7c4d3e552202d6b32a67c5acc461b0b35718d8ec5c7363"}, - {file = "ckzg-1.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb9d0b09ca1bdb5955b626d6645f811424ae0fcab47699a1a938a3ce0438c25f"}, - {file = "ckzg-1.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87a121ace8feb6c9386f247e7e36ef55e584fc8a6b1bc2c60757a59c1efe364"}, - {file = "ckzg-1.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:97c27153fab853f017fed159333b27beeb2e0da834c92c9ecdc26d0e5c3983b3"}, - {file = "ckzg-1.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b26799907257c39471cb3665f66f7630797140131606085c2c94a7094ab6ddf2"}, - {file = "ckzg-1.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:283a40c625222560fda3dcb912b666f7d50f9502587b73c4358979f519f1c961"}, - {file = "ckzg-1.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5f029822d27c52b9c3dbe5706408b099da779f10929be0422a09a34aa026a872"}, - {file = "ckzg-1.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edaea8fb50b01c6c19768d9305ad365639a8cd804754277d5108dcae4808f00b"}, - {file = "ckzg-1.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:27be65c88d5d773a30e6f198719cefede7e25cad807384c3d65a09c11616fc9d"}, - {file = "ckzg-1.0.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9ac729c5c6f3d2c030c0bc8c9e10edc253e36f002cfe227292035009965d349"}, - {file = "ckzg-1.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1528bc2b95aac6d184a90b023602c40d7b11b577235848c1b5593c00cf51d37"}, 
- {file = "ckzg-1.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071dc7fc179316ce1bfabaa056156e4e84f312c4560ab7b9529a3b9a84019df3"}, - {file = "ckzg-1.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:895044069de7010be6c7ee703f03fd7548267a0823cf60b9dd26ec50267dd9e8"}, - {file = "ckzg-1.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ed8c99cd3d9af596470e0481fd58931007288951719bad026f0dd486dd0ec11"}, - {file = "ckzg-1.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74d87eafe561d4bfb544a4f3419d26c56ad7de00f39789ef0fdb09515544d12e"}, - {file = "ckzg-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:54d71e5ca416bd51c543f9f51e426e6792f8a0280b83aef92faad1b826f401ea"}, - {file = "ckzg-1.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da2d9988781a09a4577ee7ea8f51fe4a94b4422789a523164f5ba3118566ad41"}, - {file = "ckzg-1.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d9e030af7d6acdcb356fddfb095048bc8e880fe4cd70ff2206c64f33bf384a0d"}, - {file = "ckzg-1.0.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:145ae31c3d499d1950567bd636dc5b24292b600296b9deb5523bc20d8f7b51c3"}, - {file = "ckzg-1.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d81e68e84d80084da298471ad5eaddfcc1cf73545cb24e9453550c8186870982"}, - {file = "ckzg-1.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67064bbbeba1a6892c9c80b3d0c2a540ff48a5ca5356fdb2a8d998b264e43e6"}, - {file = "ckzg-1.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:99694917eb6decefc0d330d9887a89ea770824b2fa76eb830bab5fe57ea5c20c"}, - {file = "ckzg-1.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fca227ce0ce3427254a113fdb3aed5ecd99c1fc670cb0c60cc8a2154793678e4"}, - {file = "ckzg-1.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a66a690d3d1801085d11de6825df47a99b465ff32dbe90be4a3c9f43c577da96"}, - {file = 
"ckzg-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:272adfe471380d10e4a0e1639d877e504555079a60233dd82249c799b15be81e"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f37be0054ebb4b8ac6e6d5267290b239b09e7ddc611776051b4c3c4032d161ba"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:611c03a170f0f746180eeb0cc28cdc6f954561b8eb9013605a046de86520ee6b"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75b2f0ab341f3c33702ce64e1c101116c7462a25686d0b1a0193ca654ad4f96e"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab29fc61fbd32096b82b02e6b18ae0d7423048d3540b7b90805b16ae10bdb769"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e43741e7453262aa3ba1754623d7864250b33751bd850dd548e3ed6bd1911093"}, - {file = "ckzg-1.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:155eacc237cb28c9eafda1c47a89e6e4550f1c2e711f2eee21e0bb2f4df75546"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31d7fbe396a51f43375e38c31bc3a96c7996882582f95f3fcfd54acfa7b3ce6"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d3d049186c9966e9140de39a9979d7adcfe22f8b02d2852c94d3c363235cc18"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88728fbd410d61bd5d655ac50b842714c38bc34ff717f73592132d28911fc88e"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:052d302058d72431acc9dd4a9c76854c8dfce10c698deef5252884e32a1ac7bf"}, - {file = "ckzg-1.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:633110a9431231664be2ad32baf10971547f18289d33967654581b9ae9c94a7e"}, - {file = 
"ckzg-1.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f439c9e5297ae29a700f6d55de1525e2e295dbbb7366f0974c8702fca9e536b9"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:94f7eb080c00c0ccbd4fafad69f0b35b624a6a229a28e11d365b60b58a072832"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f876783ec654b7b9525503c2a0a1b086e5d4f52ff65cac7e8747769b0c2e5468"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e039800e50592580171830e788ef4a1d6bb54300d074ae9f9119e92aefc568"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a8cccf0070a29bc01493179db2e61220ee1a6cb17f8ea41c68a2f043ace87f"}, - {file = "ckzg-1.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4f86cef801d7b0838e17b6ee2f2c9e747447d91ad1220a701baccdf7ef11a3c8"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2433a89af4158beddebbdd66fae95b34d40f2467bee8dc40df0333de5e616b5f"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c49d5dc0918ad912777720035f9820bdbb6c7e7d1898e12506d44ab3c938d525"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:331d49bc72430a3f85ea6ecb55a0d0d65f66a21d61af5783b465906a741366d5"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86627bc33bc63b8de869d7d5bfa9868619a4f3e4e7082103935c52f56c66b5"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab6a2ba2706b5eaa1ce6bc7c4e72970bf9587e2e0e482e5fb4df1996bccb7a40"}, - {file = "ckzg-1.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8bca5e7c38d913fabc24ad09545f78ba23cfc13e1ac8250644231729ca908549"}, - {file = "ckzg-1.0.2.tar.gz", hash = 
"sha256:4295acc380f8d42ebea4a4a0a68c424a322bb335a33bad05c72ead8cbb28d118"}, + {file = "ckzg-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b7f9ba6d215f8981c5545f952aac84875bd564a63da02fb22a3d1321662ecdc0"}, + {file = "ckzg-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8fdec3ff96399acba9baeef9e1b0b5258c08f73245780e6c69f7b73def5e8d0a"}, + {file = "ckzg-2.0.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1644369af9900a9f109d417d6760693edf134118f3100d0c68f56667de775b80"}, + {file = "ckzg-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a2146f122d489ac7e67ae0c0743f8d0db1718e6aeed8f05717340594fe07dd"}, + {file = "ckzg-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:979841be50f2782b447762db38e9bc927ae251f6ca86c54a26561a52068ee779"}, + {file = "ckzg-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4516d86647ee4e8ea9470f4adf68fbebb6dc1bdedff7d9592c2504fe53145908"}, + {file = "ckzg-2.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:91866fc58a29b4829201efd9ffadfac3ffeca6359254a54a360ff6a189c34bf5"}, + {file = "ckzg-2.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed35508dac059b2c0a7994383bc7a92eaf35d0b9ce790016819e2619e0f4b8a9"}, + {file = "ckzg-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:449c4fe38017351eca362106420eeb2d28d50b7e54aa8668b3af29a8ab780132"}, + {file = "ckzg-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:260608a22e2f2cadcd31f4495832d45d6460438c38faba9761b92df885a99d88"}, + {file = "ckzg-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e1015f99c50215098751b07d7e459ba9a2790d3692ca81552eed29996128e90d"}, + {file = "ckzg-2.0.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dd350d97554c161dc5b8c7b32c2dc8e659632c374f60e2669fb3c9b5b294827"}, + {file = 
"ckzg-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec7724fa8dc4ae95757efe4a87e7b2d4b880cb348c72ce7355fc0c4f64bc298"}, + {file = "ckzg-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3fa0f4398fa67fb71f0a2b34a652cc89e6e0e6af1340b0dc771db1a5f3e089c"}, + {file = "ckzg-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f865a0297aabeeb638187a46f7df445763360417b9df4dea60560d512c2cda09"}, + {file = "ckzg-2.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b6ec738350771dbf5974fb70cc8bbb20a4df784af770f7e655922adc08a2171"}, + {file = "ckzg-2.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9b4b669fc77edeb16adc182efc32b3737b36f741a2e33a170d40619e8b171a94"}, + {file = "ckzg-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:decb97f4a17c7338b2130dcc4b045df4cc0e7785ece872c764b554c7c73a99ff"}, + {file = "ckzg-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:285cf3121b8a8c5609c5b706314f68d2ba2784ab02c5bb7487c6ae1714ecb27f"}, + {file = "ckzg-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f927bc41c2551b0ef0056a649a7ebed29d9665680a10795f4cee5002c69ddb7"}, + {file = "ckzg-2.0.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd9fb690c88919f30c9f3ab7cc46a7ecd734d5ff4c9ccea383c119b9b7cc4da"}, + {file = "ckzg-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fabc3bd41b306d1c7025d561c3281a007c2aca8ceaf998582dc3894904d9c73e"}, + {file = "ckzg-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb50c53efdb9c34f762bd0c8006cf79bc92a9daf47aa6b541e496988484124f"}, + {file = "ckzg-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7960cc62f959403293fb53a3c2404778369ae7cefc6d7f202e5e00567cf98c4b"}, + {file = "ckzg-2.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d721bcd492294c70eca39da0b0a433c29b6a571dbac2f7084bab06334904af06"}, + 
{file = "ckzg-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dde2391d025b5033ef0eeacf62b11ecfe446aea25682b5f547a907766ad0a8cb"}, + {file = "ckzg-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fab8859d9420f6f7df4e094ee3639bc49d18c8dab0df81bee825e2363dd67a09"}, + {file = "ckzg-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9747d92883199d4f8f3a3d7018134745fddcf692dfe67115434e4b32609ea785"}, + {file = "ckzg-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b2cf58fb9e165da97f0ffe9f4a6efb73992645fac8e0fa223a6cc7ec486a434a"}, + {file = "ckzg-2.0.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d25d006899d76bb8c9d3e8b27981dd6b66a78f9826e33c1bf981af6577a69a19"}, + {file = "ckzg-2.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a04bf0b32f04f5ea5e4b8518e292d3321bc05596fde95f9c3b4f504e5e4bc780"}, + {file = "ckzg-2.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0cf3dccd72376bff10e1833641cc9d642f34f60ca63972626d9dfcfdc8e77f"}, + {file = "ckzg-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:770809c7e93087470cc524724419b0f85590edb033c7c73ba94aef70b36ca18b"}, + {file = "ckzg-2.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e31b59b8124148d5e21f7e41b35532d7af98260c44a77c3917958adece84296d"}, + {file = "ckzg-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:174f0c356df644d6e349ce03b7284d83dbec859e11ca5d1b1b3bace8b8fbc65d"}, + {file = "ckzg-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:30e375cd45142e56b5dbfdec05ce4deb2368d7f7dedfc7408ba37d5639af05ff"}, + {file = "ckzg-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:abdee71958b214730a8341b16bdd413d0fab1b1a2504fbdb7b0ef2aeee9f9d22"}, + {file = "ckzg-2.0.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b4442667058db791325fe231f22e4fc7aaa3495d535d75af5595bc5f4f86036"}, 
+ {file = "ckzg-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c3c9aa9d4477ad52f3561b717e776c1a8a442d9d8b06600c7d8a2857d1ecf05"}, + {file = "ckzg-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68e0a9cde35f11e80b4e560d22990f2f29dd200a95d3141acde137cb6c883f9a"}, + {file = "ckzg-2.0.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:4508a089e53330866d3360000d76483400eeab5f8057b8e1f3e344ce2cc0097b"}, + {file = "ckzg-2.0.1-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:828cecee16ec576dcf4386beac4eedfd058fd32ee90827f2282e7156a53600be"}, + {file = "ckzg-2.0.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:bd437ec1dfb4f5609979328b5f465a74307f45d46d24234868c67d44da96903b"}, + {file = "ckzg-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:70406b10acf68469ac62110047044a6c1a998f5d5fcd6e27cb3ec2d5760d0490"}, + {file = "ckzg-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2f53fba88febac17e82a96eb83dc38ecf4b28abcdd15c0246534c358bd3b26c4"}, + {file = "ckzg-2.0.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8e0d5015e7755af4ddaab9ae1a4084f72c84b2cbb53628f4366aeed46cc380"}, + {file = "ckzg-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:261414121091042d29f28fc319d7c9a7f950f91f8bf54c010b581ee6a0499473"}, + {file = "ckzg-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:524e1e66edd2be2c38b660824aa7b5d4525b41b30ac029d80738a8eee491aeb5"}, + {file = "ckzg-2.0.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:4a12a1d8ef8f475d9f0af9a538e1674057e007806cb1204bb269ea00d9f8c1e5"}, + {file = "ckzg-2.0.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:4cc4bb5f62417a58065deeaf124e178cb1787ef3228e6032600d1e0a2775765b"}, + {file = "ckzg-2.0.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:e7b015f5615bcb82fa0d935481a209fc1dcd9308fb52fb1a7e5400108df67a94"}, + 
{file = "ckzg-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0518933ff3b9550f9dd60d833cdb74e8e97cc1cc58f0560b706916606dfd47d0"}, + {file = "ckzg-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ac0bca0795990076cde1930ecec307379b5303e34367c6e6e8a16bdba5a7ba5"}, + {file = "ckzg-2.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8086d23a41020ede312843bda7ea4ee0c9831265379027904106f99f2f8ed469"}, + {file = "ckzg-2.0.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31d1b141d41fa51aeac9440c936b812e885aef5719adfbd3a27550d8dc433997"}, + {file = "ckzg-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60a58e4d8cb91bad669ca111b7ccdd05c32de6787fdb571bb599625b043ad75b"}, + {file = "ckzg-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633e143385622d7a43fcb5c4f400ec5ec15df0b1c74ab7d6449a41a7abed24ad"}, + {file = "ckzg-2.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4876313614ea01f9a0039b5ca2c754340ba40aa8405f8756912d90ae55718011"}, + {file = "ckzg-2.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:19c86c8102200484074afac06b3946b457ba9915636de187f63854522be2e3bd"}, + {file = "ckzg-2.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:564abf27878f129781e1df4d33b1c4e264e5b25f89c1bdf95b7d6256e4bceb6c"}, + {file = "ckzg-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:bc2da29bb970d3f5de04fb60797dbb4490c010ffc683cbc6016349dd6fa60d14"}, + {file = "ckzg-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9c1869671140ae7e698520b678b594ebd26fb59ef476711403541597d7d32c01"}, + {file = "ckzg-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd2aec2c61e8cc2ec815900f6768c6fe74b8fd29810e79b57c4150c6db32fb6"}, + {file = "ckzg-2.0.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9632ef17285dbdd3fcd9780f599c266da736d9b2897decc4ea02ba8690bdf72"}, + {file = 
"ckzg-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5747d7926873e3af0f6af5fca666feb0097d06cab525950e2664a6fbcb90165d"}, + {file = "ckzg-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75484ffb78aaebaeb3a30f1194a9143b904312b0f365fc4101e58e1bf5f89f66"}, + {file = "ckzg-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b2f72bc861b8bee9bac3314c58586d1ab2d23530f932a8f0a8562c8a4a6a45f9"}, + {file = "ckzg-2.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6f85e5802fea5b77f52fc3a14c8dec18a3f2b7c7070c811a4608940834f563cc"}, + {file = "ckzg-2.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:583a0b6b531a16974676439b23e7defb3dfe9732f18d13d2316152019c538af1"}, + {file = "ckzg-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:fafb9ac36b3398f8091d40773d9a450e5f74883dad8ca4ee22d472e7a231ef4d"}, + {file = "ckzg-2.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a12e96f20dce35e5222f898a5c8355054ef7c5ee038eeb97dbb694640b57577b"}, + {file = "ckzg-2.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4e0ebc55253addaa24dd2cd871bbe3b8f57855f32b5f74e70bf2cb76b6f7da54"}, + {file = "ckzg-2.0.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f917a7bf363a3735db30559e1ed63cf1ccf414234433ba687fa72c007abd756"}, + {file = "ckzg-2.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30f08c984286853271d4adae219e9ba87275a15047dbaa262ab8dd6c01be97b0"}, + {file = "ckzg-2.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa1ea4888417e1f109fd5e57965788fb7f53b674329b937a65604a3c1ca1d03"}, + {file = "ckzg-2.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0b249914aeaf05cabc71c5c3797e3d6c126cb2c64192b7eb6755ef6aa5ab2f11"}, + {file = "ckzg-2.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:a038e26baf650e1c733dcaa066ec948e75556b0c485e8c790c9a758875c71a93"}, + {file = "ckzg-2.0.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d6deb2c822122bdd32b555fa3b9216c86a355f24a2cc6a46b9b5743b412b60c"}, + {file = "ckzg-2.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50f6f2fbceba9ece3fbc1d2613a246f4e6ec4d787f542859e70c358928c0e4a1"}, + {file = "ckzg-2.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ca40ef30129e2347bff3c95ad093403a0d5703476705ab92c92fbffe89bd5a"}, + {file = "ckzg-2.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:700b989c2f7089edc8fac6dfbd1b4677e85b966216ebedee8eb5e7894765c188"}, + {file = "ckzg-2.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f11933c007c3df02446a81957ac6e2488058b969e2eff5357c98ab569a0c7999"}, + {file = "ckzg-2.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3dbc9580eccecbd485f22e48f6044c48cbe6d838a7b7514cce179c085c65a960"}, + {file = "ckzg-2.0.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad6eb83f343fea6dd9a13fd1bce87b9cd26abeeb72f0674a62d26e40fe0b8aca"}, + {file = "ckzg-2.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:269f82b992facbd20461310cf5784551c77d11017b7d4b85d741d70359be6794"}, + {file = "ckzg-2.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:895d67cfd43130652e1ae39b90465b392d9a72c7c7e6f250eaf14689bfda6351"}, + {file = "ckzg-2.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:369cf1aeaf336c31f2050a7f54ae21cf46f4b2db23ebb013fff621144ab361bb"}, + {file = "ckzg-2.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:24fda2637598a467e7b11ff664805ee7fdf4f6c7b0c043d6d0a6ccb69b5681ee"}, + {file = "ckzg-2.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:ea27baabe5b22b92901c428768eacf93b992ac7681f93768ab24818ad26ccfed"}, + {file = "ckzg-2.0.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a33f71e382020f2bc4ead2bd6881a9bd3811d929f272da239ac01ad615a00802"}, + {file = "ckzg-2.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:926507c569727bb4c851a1eea702c5e902267de96e06ce2d685019f973f72968"}, + {file = "ckzg-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f5f29518b0a4555d8f2a28559209bd1d4080547aa629ff9ee51799346573b3f"}, + {file = "ckzg-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4595db84ce63c227e4448de0f7b39d3043e3477d78394ff651708c37fee6c486"}, + {file = "ckzg-2.0.1.tar.gz", hash = "sha256:62c5adc381637affa7e1df465c57750b356a761b8a3164c3106589b02532b9c9"}, ] [[package]] @@ -622,6 +827,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -636,10 +842,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "coverage" @@ -647,6 +855,8 @@ version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, @@ -726,7 +936,87 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "coverage" +version = "7.6.12" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = 
"coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + 
{file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = 
"coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cytoolz" @@ -734,6 +1024,8 @@ version = "1.0.1" description = "Cython implementation of Toolz: High performance functional utilities" optional = false python-versions = ">=3.8" +groups 
= ["main"] +markers = "implementation_name == \"cpython\"" files = [ {file = "cytoolz-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cec9af61f71fc3853eb5dca3d42eb07d1f48a4599fa502cbe92adde85f74b042"}, {file = "cytoolz-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:140bbd649dbda01e91add7642149a5987a7c3ccc251f2263de894b89f50b6608"}, @@ -849,6 +1141,7 @@ version = "0.22.3" description = "Marshal dataclasses to/from JSON. Use field properties with initial values. Construct a dataclass schema with JSON input." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "dataclass-wizard-0.22.3.tar.gz", hash = "sha256:4c46591782265058f1148cfd1f54a3a91221e63986fdd04c9d59f4ced61f4424"}, {file = "dataclass_wizard-0.22.3-py2.py3-none-any.whl", hash = "sha256:63751203e54b9b9349212cc185331da73c1adc99c51312575eb73bb5c00c1962"}, @@ -858,7 +1151,7 @@ files = [ typing-extensions = {version = ">=3.7.4.2", markers = "python_version <= \"3.9\""} [package.extras] -dev = ["Sphinx (==5.3.0)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.12)", "dataclasses-json (==0.5.6)", "flake8 (>=3)", "jsons (==1.6.1)", "pip (>=21.3.1)", "pytest (==7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==3.0.1)", "sphinx-issues (==4.0.0)", "tox (==3.24.5)", "twine (==3.8.0)", "watchdog[watchmedo] (==2.1.6)", "wheel (==0.37.1)", "wheel (==0.42.0)"] +dev = ["Sphinx (==5.3.0)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.12)", "dataclasses-json (==0.5.6)", "flake8 (>=3)", "jsons (==1.6.1)", "pip (>=21.3.1)", "pytest (==7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==3.0.1) ; python_version < \"3.8\"", "sphinx-issues (==4.0.0) ; python_version >= \"3.8\"", "tox (==3.24.5)", "twine (==3.8.0)", "watchdog[watchmedo] (==2.1.6)", "wheel (==0.37.1) ; python_version == \"3.6\"", "wheel (==0.42.0) ; python_version > \"3.6\""] 
timedelta = ["pytimeparse (>=1.1.7)"] yaml = ["PyYAML (>=5.3)"] @@ -868,6 +1161,7 @@ version = "1.2.18" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] files = [ {file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"}, {file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"}, @@ -877,7 +1171,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"] [[package]] name = "dill" @@ -885,6 +1179,7 @@ version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -894,12 +1189,73 @@ files = [ graph = ["objgraph (>=1.7.2)"] profile = ["gprof2dot (>=2022.7.29)"] +[[package]] +name = "dnspython" +version = "2.6.1" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = 
["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=0.9.25)"] +idna = ["idna (>=3.6)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + [[package]] name = "et-xmlfile" version = "2.0.0" description = "An implementation of lxml.xmlfile for the standard library" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, @@ -911,6 +1267,7 @@ version = "5.2.0" description = "eth_abi: Python utilities for working with Ethereum ABI definitions, especially encoding and decoding" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ {file = "eth_abi-5.2.0-py3-none-any.whl", hash = "sha256:17abe47560ad753f18054f5b3089fcb588f3e3a092136a416b6c1502cb7e8877"}, {file = "eth_abi-5.2.0.tar.gz", hash = "sha256:178703fa98c07d8eecd5ae569e7e8d159e493ebb6eeb534a8fe973fbc4e40ef0"}, @@ -929,30 +1286,32 @@ tools = ["hypothesis (>=6.22.0,<6.108.7)"] [[package]] name = "eth-account" -version = "0.11.3" +version = "0.13.5" description = "eth-account: Sign Ethereum transactions and messages with local private keys" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "eth_account-0.11.3-py3-none-any.whl", hash = "sha256:16cf58aabc65171fc206489899b7e5546e3215e1a4debc12dbd55345c979081e"}, - {file = "eth_account-0.11.3.tar.gz", hash = "sha256:a712a9534638a7cfaa4cc069f1b9d5cefeee70362cfc3a7b0a2534ee61ce76c9"}, + {file = "eth_account-0.13.5-py3-none-any.whl", hash = 
"sha256:e43fd30c9a7fabb882b50e8c4c41d4486d2f3478ad97c66bb18cfcc872fdbec8"}, + {file = "eth_account-0.13.5.tar.gz", hash = "sha256:010c9ce5f3d2688106cf9bfeb711bb8eaf0154ea6f85325f54fecea85c2b3759"}, ] [package.dependencies] bitarray = ">=2.4.0" -ckzg = ">=0.4.3,<2" +ckzg = ">=2.0.0" eth-abi = ">=4.0.0-b.2" -eth-keyfile = ">=0.6.0" +eth-keyfile = ">=0.7.0,<0.9.0" eth-keys = ">=0.4.0" -eth-rlp = ">=0.3.0" +eth-rlp = ">=2.1.0" eth-utils = ">=2.0.0" -hexbytes = ">=0.1.0,<0.4.0" +hexbytes = ">=1.2.0" +pydantic = ">=2.0.0" rlp = ">=1.0.0" [package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "coverage", "hypothesis (>=4.18.0,<5)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -test = ["coverage", "hypothesis (>=4.18.0,<5)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] +dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "coverage", "hypothesis (>=6.22.0,<6.108.7)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"] +test = ["coverage", "hypothesis (>=6.22.0,<6.108.7)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-hash" @@ -960,6 +1319,7 @@ version = "0.7.1" description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ {file = "eth_hash-0.7.1-py3-none-any.whl", hash = "sha256:0fb1add2adf99ef28883fd6228eb447ef519ea72933535ad1a0b28c6f65f868a"}, {file = "eth_hash-0.7.1.tar.gz", hash = 
"sha256:d2411a403a0b0a62e8247b4117932d900ffb4c8c64b15f92620547ca5ce46be5"}, @@ -972,29 +1332,29 @@ pycryptodome = {version = ">=3.6.6,<4", optional = true, markers = "extra == \"p dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"] pycryptodome = ["pycryptodome (>=3.6.6,<4)"] -pysha3 = ["pysha3 (>=1.0.0,<2.0.0)", "safe-pysha3 (>=1.0.0)"] +pysha3 = ["pysha3 (>=1.0.0,<2.0.0) ; python_version < \"3.9\"", "safe-pysha3 (>=1.0.0) ; python_version >= \"3.9\""] test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-keyfile" -version = "0.9.0" +version = "0.8.1" description = "eth-keyfile: A library for handling the encrypted keyfiles used to store ethereum private keys" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "eth_keyfile-0.9.0-py3-none-any.whl", hash = "sha256:45d3513b6433ad885370225ba0429ed26493ba23589c5b1ca5da024765020fef"}, - {file = "eth_keyfile-0.9.0.tar.gz", hash = "sha256:8621c35e83cbc05909d2f23dbb8a87633918733caea553ae0e298f6a06291526"}, + {file = "eth_keyfile-0.8.1-py3-none-any.whl", hash = "sha256:65387378b82fe7e86d7cb9f8d98e6d639142661b2f6f490629da09fddbef6d64"}, + {file = "eth_keyfile-0.8.1.tar.gz", hash = "sha256:9708bc31f386b52cca0969238ff35b1ac72bd7a7186f2a84b86110d3c973bec1"}, ] [package.dependencies] eth-keys = ">=0.4.0" eth-utils = ">=2" -py_ecc = ">=5.2.0" pycryptodome = ">=3.6.6,<4" [package.extras] -dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] -docs = 
["towncrier (>=24,<25)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["towncrier (>=21,<22)"] test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] @@ -1003,6 +1363,7 @@ version = "0.6.1" description = "eth-keys: Common API for Ethereum key operations" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ {file = "eth_keys-0.6.1-py3-none-any.whl", hash = "sha256:7deae4cd56e862e099ec58b78176232b931c4ea5ecded2f50c7b1ccbc10c24cf"}, {file = "eth_keys-0.6.1.tar.gz", hash = "sha256:a43e263cbcabfd62fa769168efc6c27b1f5603040e4de22bb84d12567e4fd962"}, @@ -1020,66 +1381,69 @@ test = ["asn1tools (>=0.146.2)", "eth-hash[pysha3]", "factory-boy (>=3.0.1)", "h [[package]] name = "eth-rlp" -version = "1.0.1" +version = "2.2.0" description = "eth-rlp: RLP definitions for common Ethereum objects in Python" optional = false -python-versions = ">=3.8, <4" +python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "eth-rlp-1.0.1.tar.gz", hash = "sha256:d61dbda892ee1220f28fb3663c08f6383c305db9f1f5624dc585c9cd05115027"}, - {file = "eth_rlp-1.0.1-py3-none-any.whl", hash = "sha256:dd76515d71654277377d48876b88e839d61553aaf56952e580bb7cebef2b1517"}, + {file = "eth_rlp-2.2.0-py3-none-any.whl", hash = "sha256:5692d595a741fbaef1203db6a2fedffbd2506d31455a6ad378c8449ee5985c47"}, + {file = "eth_rlp-2.2.0.tar.gz", hash = "sha256:5e4b2eb1b8213e303d6a232dfe35ab8c29e2d3051b86e8d359def80cd21db83d"}, ] [package.dependencies] eth-utils = ">=2.0.0" -hexbytes = ">=0.1.0,<1" +hexbytes = ">=1.2.0" rlp = ">=0.6.0" -typing-extensions = {version = ">=4.0.1", markers = "python_version <= \"3.11\""} +typing_extensions = {version = ">=4.0.1", markers = "python_version <= \"3.10\""} [package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "ipython", "pre-commit (>=3.4.0)", "pytest 
(>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "eth-hash[pycryptodome]", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"] test = ["eth-hash[pycryptodome]", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-typing" -version = "4.4.0" +version = "5.2.0" description = "eth-typing: Common type annotations for ethereum python packages" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "eth_typing-4.4.0-py3-none-any.whl", hash = "sha256:a5e30a6e69edda7b1d1e96e9d71bab48b9bb988a77909d8d1666242c5562f841"}, - {file = "eth_typing-4.4.0.tar.gz", hash = "sha256:93848083ac6bb4c20cc209ea9153a08b0a528be23337c889f89e1e5ffbe9807d"}, + {file = "eth_typing-5.2.0-py3-none-any.whl", hash = "sha256:e1f424e97990fc3c6a1c05a7b0968caed4e20e9c99a4d5f4db3df418e25ddc80"}, + {file = "eth_typing-5.2.0.tar.gz", hash = "sha256:28685f7e2270ea0d209b75bdef76d8ecef27703e1a16399f6929820d05071c28"}, ] [package.dependencies] -typing-extensions = ">=4.5.0" +typing_extensions = ">=4.5.0" [package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "ipython", "mypy (==1.10.0)", 
"pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"] test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-utils" -version = "4.1.1" +version = "5.1.0" description = "eth-utils: Common utility functions for python code that interacts with Ethereum" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "eth_utils-4.1.1-py3-none-any.whl", hash = "sha256:ccbbac68a6d65cb6e294c5bcb6c6a5cec79a241c56dc5d9c345ed788c30f8534"}, - {file = "eth_utils-4.1.1.tar.gz", hash = "sha256:71c8d10dec7494aeed20fa7a4d52ec2ce4a2e52fdce80aab4f5c3c19f3648b25"}, + {file = "eth_utils-5.1.0-py3-none-any.whl", hash = "sha256:a99f1f01b51206620904c5af47fac65abc143aebd0a76bdec860381c5a3230f8"}, + {file = "eth_utils-5.1.0.tar.gz", hash = "sha256:84c6314b9cf1fcd526107464bbf487e3f87097a2e753360d5ed319f7d42e3f20"}, ] [package.dependencies] cytoolz = {version = ">=0.10.1", markers = "implementation_name == \"cpython\""} eth-hash = ">=0.3.1" -eth-typing = ">=3.0.0" +eth-typing = ">=5.0.0" toolz = {version = ">0.8.2", markers = "implementation_name == \"pypy\""} [package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "hypothesis (>=4.43.0)", "ipython", "mypy (==1.5.1)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +dev = ["build (>=0.9.0)", "bump-my-version (>=0.19.0)", "eth-hash[pycryptodome]", "hypothesis (>=4.43.0)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", 
"sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -test = ["hypothesis (>=4.43.0)", "mypy (==1.5.1)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] +test = ["hypothesis (>=4.43.0)", "mypy (==1.10.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "ethereum-dasm" @@ -1087,6 +1451,7 @@ version = "0.1.5" description = "An ethereum bytecode disassembler with static and dynamic analysis features" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ethereum_dasm-0.1.5-py3-none-any.whl", hash = "sha256:998aff7fbc1ef70210c1a34ffa2788e25f4e014c61e164411f6e4b4c2fdf2907"}, ] @@ -1107,6 +1472,7 @@ version = "0.1.10" description = "A lightweight ethereum evm bytecode asm instruction registry and disassembler library." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "evmdasm-0.1.10-py3-none-any.whl", hash = "sha256:30b7ef0c7b13edcaed7ee04a31fac7e24735610fa24b6803cd81706d93db1c61"}, {file = "evmdasm-0.1.10.tar.gz", hash = "sha256:b7a699740ab56bee605e0ffff51c72d49033755f41d40d001a4769567e906d78"}, @@ -1118,6 +1484,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version <= \"3.10\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -1127,135 +1495,68 @@ files = [ test = ["pytest (>=6)"] [[package]] -name = "flake8" -version = "3.9.2" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -files = [ - {file = 
"flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[[package]] -name = "flask" -version = "3.0.3" -description = "A simple framework for building complex web applications." +name = "fastapi" +version = "0.115.11" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, - {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, + {file = "fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64"}, + {file = "fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f"}, ] [package.dependencies] -blinker = ">=1.6.2" -click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} -itsdangerous = ">=2.1.2" -Jinja2 = ">=3.1.2" -Werkzeug = ">=3.0.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"standard\""} +fastapi-cli = {version = ">=0.0.5", extras = ["standard"], optional = true, markers = "extra == \"standard\""} +httpx = {version = ">=0.23.0", optional = true, markers = "extra == \"standard\""} +jinja2 = {version = ">=3.1.5", optional = true, markers = "extra == \"standard\""} +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +python-multipart = {version = ">=0.0.18", optional = true, markers = "extra == \"standard\""} +starlette = 
">=0.40.0,<0.47.0" +typing-extensions = ">=4.8.0" +uvicorn = {version = ">=0.12.0", extras = ["standard"], optional = true, markers = "extra == \"standard\""} [package.extras] -async = ["asgiref (>=3.2)"] -dotenv = ["python-dotenv"] - -[[package]] -name = "flask-caching" -version = "2.3.0" -description = "Adds caching support to Flask applications." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Flask_Caching-2.3.0-py3-none-any.whl", hash = "sha256:51771c75682e5abc1483b78b96d9131d7941dc669b073852edfa319dd4e29b6e"}, - {file = "flask_caching-2.3.0.tar.gz", hash = "sha256:d7e4ca64a33b49feb339fcdd17e6ba25f5e01168cf885e53790e885f83a4d2cf"}, -] - -[package.dependencies] -cachelib = ">=0.9.0,<0.10.0" -Flask = "*" - -[[package]] -name = "flask-cors" -version = "3.0.9" -description = "A Flask extension adding a decorator for CORS support" -optional = false -python-versions = "*" -files = [ - {file = "Flask-Cors-3.0.9.tar.gz", hash = "sha256:6bcfc100288c5d1bcb1dbb854babd59beee622ffd321e444b05f24d6d58466b8"}, - {file = "Flask_Cors-3.0.9-py2.py3-none-any.whl", hash = "sha256:cee4480aaee421ed029eaa788f4049e3e26d15b5affb6a880dade6bafad38324"}, -] - -[package.dependencies] -Flask = ">=0.9" -Six = "*" +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] [[package]] -name = "flask-limiter" -version = "3.8.0" -description = "Rate limiting for flask applications" +name = "fastapi-cli" +version = "0.0.7" +description = "Run and manage FastAPI apps from the command line with 
FastAPI CLI. 🚀" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Flask_Limiter-3.8.0-py3-none-any.whl", hash = "sha256:0ab44f586d8cc349412791711b6cbafe8f86e7b60ad9e8f24f2686009f00900e"}, - {file = "flask_limiter-3.8.0.tar.gz", hash = "sha256:686f8b4a75404e47b91565a795c70d29f69c145f6907f1f32522e962b134dada"}, -] - -[package.dependencies] -Flask = ">=2" -limits = ">=3.13" -ordered-set = ">4,<5" -rich = ">=12,<14" -typing-extensions = ">=4" - -[package.extras] -memcached = ["limits[memcached]"] -mongodb = ["limits[mongodb]"] -redis = ["limits[redis]"] - -[[package]] -name = "flask-restx" -version = "1.3.0" -description = "Fully featured framework for fast, easy and documented API development with Flask" -optional = false -python-versions = "*" -files = [ - {file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"}, - {file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"}, + {file = "fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4"}, + {file = "fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e"}, ] [package.dependencies] -aniso8601 = ">=0.82" -Flask = ">=0.8,<2.0.0 || >2.0.0" -importlib-resources = "*" -jsonschema = "*" -pytz = "*" -werkzeug = "!=2.0.0" +rich-toolkit = ">=0.11.1" +typer = ">=0.12.3" +uvicorn = {version = ">=0.15.0", extras = ["standard"]} [package.extras] -dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "tox", "twine (==3.8.0)", "tzlocal"] -doc = ["Sphinx (==5.3.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"] -test = ["Faker (==2.0.0)", "blinker", 
"invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)", "tzlocal"] +standard = ["uvicorn[standard] (>=0.15.0)"] [[package]] -name = "flask-sqlalchemy" -version = "3.1.1" -description = "Add SQLAlchemy support to your Flask application." +name = "flake8" +version = "3.9.2" +description = "the modular source code checker: pep8 pyflakes and co" optional = false -python-versions = ">=3.8" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +groups = ["dev"] files = [ - {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, - {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, ] [package.dependencies] -flask = ">=2.2.5" -sqlalchemy = ">=2.0.16" +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" [[package]] name = "frozenlist" @@ -1263,6 +1564,7 @@ version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1364,6 +1666,8 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = 
["main"] +markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -1444,59 +1748,152 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "hexbytes" -version = "0.3.1" +version = "1.3.0" description = "hexbytes: Python `bytes` subclass that decodes hex, with a readable console output" optional = false -python-versions = ">=3.7, <4" +python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "hexbytes-0.3.1-py3-none-any.whl", hash = "sha256:383595ad75026cf00abd570f44b368c6cdac0c6becfae5c39ff88829877f8a59"}, - {file = "hexbytes-0.3.1.tar.gz", hash = "sha256:a3fe35c6831ee8fafd048c4c086b986075fc14fd46258fa24ecb8d65745f9a9d"}, + {file = "hexbytes-1.3.0-py3-none-any.whl", hash = "sha256:83720b529c6e15ed21627962938dc2dec9bb1010f17bbbd66bf1e6a8287d522c"}, + {file = "hexbytes-1.3.0.tar.gz", hash = "sha256:4a61840c24b0909a6534350e2d28ee50159ca1c9e89ce275fd31c110312cf684"}, ] [package.extras] -dev = ["black (>=22)", "bumpversion (>=0.5.3)", "eth-utils 
(>=1.0.1,<3)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "hypothesis (>=3.44.24,<=6.31.6)", "ipython", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=5.0.0)", "pytest (>=7.0.0)", "pytest-watch (>=4.1.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -doc = ["sphinx (>=5.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -lint = ["black (>=22)", "flake8 (==6.0.0)", "flake8-bugbear (==23.3.23)", "isort (>=5.10.1)", "mypy (==0.971)", "pydocstyle (>=5.0.0)"] -test = ["eth-utils (>=1.0.1,<3)", "hypothesis (>=3.44.24,<=6.31.6)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] +dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "eth_utils (>=2.0.0)", "hypothesis (>=3.44.24,<=6.31.6)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"] +test = ["eth_utils (>=2.0.0)", "hypothesis (>=3.44.24,<=6.31.6)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + [package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" +name = "httptools" +version = "0.6.4" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = 
"httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, +] + +[package.extras] +test = ["Cython (>=0.29.24)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] -zipp = ">=3.20" +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = 
"importlib-resources" @@ -1504,6 +1901,8 @@ version = "6.4.5" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.9\"" files = [ {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, @@ -1513,7 +1912,7 @@ files = [ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -1526,6 +1925,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1537,6 +1937,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1545,23 +1946,13 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] -[[package]] -name = "itsdangerous" -version = "2.2.0" -description = "Safely pass data to untrusted environments and back." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, - {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, -] - [[package]] name = "jinja2" version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -1573,50 +1964,13 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} -referencing = ">=0.28.4" -rpds-py = ">=0.7.1" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2023.12.1" -description = "The JSON Schema meta-schemas and 
vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, -] - -[package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.31.0" - [[package]] name = "kafka-python" version = "2.0.2" description = "Pure Python client for Apache Kafka" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "kafka-python-2.0.2.tar.gz", hash = "sha256:04dfe7fea2b63726cd6f3e79a2d86e709d608d74406638c5da33a01d45a9d7e3"}, {file = "kafka_python-2.0.2-py2.py3-none-any.whl", hash = "sha256:2d92418c7cb1c298fa6c7f0fb3519b520d0d7526ac6cb7ae2a4fc65a51a94b6e"}, @@ -1626,160 +1980,19 @@ files = [ crc32c = ["crc32c"] [[package]] -name = "limits" -version = "3.13.0" -description = "Rate limiting utilities" +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "limits-3.13.0-py3-none-any.whl", hash = "sha256:9767f7233da4255e9904b79908a728e8ec0984c0b086058b4cbbd309aea553f6"}, - {file = "limits-3.13.0.tar.gz", hash = "sha256:6571b0c567bfa175a35fed9f8a954c0c92f1c3200804282f1b8f1de4ad98a953"}, + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] [package.dependencies] -deprecated = ">=1.2" -importlib-resources = ">=1.3" -packaging = ">=21,<25" -typing-extensions = "*" - -[package.extras] -all = ["aetcd", "coredis (>=3.4.0,<5)", "emcache (>=0.6.1)", "emcache (>=1)", "etcd3", "motor (>=3,<4)", "pymemcache (>3,<5.0.0)", "pymongo (>4.1,<5)", "redis (>3,!=4.5.2,!=4.5.3,<6.0.0)", "redis (>=4.2.0,!=4.5.2,!=4.5.3)"] -async-etcd = ["aetcd"] -async-memcached = ["emcache (>=0.6.1)", "emcache (>=1)"] -async-mongodb = ["motor (>=3,<4)"] -async-redis = ["coredis (>=3.4.0,<5)"] -etcd = ["etcd3"] -memcached = ["pymemcache (>3,<5.0.0)"] -mongodb = ["pymongo (>4.1,<5)"] -redis = ["redis (>3,!=4.5.2,!=4.5.3,<6.0.0)"] -rediscluster = ["redis (>=4.2.0,!=4.5.2,!=4.5.3)"] - -[[package]] -name = "lru-dict" -version = "1.2.0" -description = "An Dict like LRU container." 
-optional = false -python-versions = "*" -files = [ - {file = "lru-dict-1.2.0.tar.gz", hash = "sha256:13c56782f19d68ddf4d8db0170041192859616514c706b126d0df2ec72a11bd7"}, - {file = "lru_dict-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:de906e5486b5c053d15b7731583c25e3c9147c288ac8152a6d1f9bccdec72641"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604d07c7604b20b3130405d137cae61579578b0e8377daae4125098feebcb970"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:203b3e78d03d88f491fa134f85a42919020686b6e6f2d09759b2f5517260c651"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:020b93870f8c7195774cbd94f033b96c14f51c57537969965c3af300331724fe"}, - {file = "lru_dict-1.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1184d91cfebd5d1e659d47f17a60185bbf621635ca56dcdc46c6a1745d25df5c"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fc42882b554a86e564e0b662da47b8a4b32fa966920bd165e27bb8079a323bc1"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:18ee88ada65bd2ffd483023be0fa1c0a6a051ef666d1cd89e921dcce134149f2"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:756230c22257597b7557eaef7f90484c489e9ba78e5bb6ab5a5bcfb6b03cb075"}, - {file = "lru_dict-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4da599af36618881748b5db457d937955bb2b4800db891647d46767d636c408"}, - {file = "lru_dict-1.2.0-cp310-cp310-win32.whl", hash = "sha256:35a142a7d1a4fd5d5799cc4f8ab2fff50a598d8cee1d1c611f50722b3e27874f"}, - {file = "lru_dict-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6da5b8099766c4da3bf1ed6e7d7f5eff1681aff6b5987d1258a13bd2ed54f0c9"}, - {file = "lru_dict-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:b20b7c9beb481e92e07368ebfaa363ed7ef61e65ffe6e0edbdbaceb33e134124"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22147367b296be31cc858bf167c448af02435cac44806b228c9be8117f1bfce4"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a3091abeb95e707f381a8b5b7dc8e4ee016316c659c49b726857b0d6d1bd7a"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:877801a20f05c467126b55338a4e9fa30e2a141eb7b0b740794571b7d619ee11"}, - {file = "lru_dict-1.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d3336e901acec897bcd318c42c2b93d5f1d038e67688f497045fc6bad2c0be7"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8dafc481d2defb381f19b22cc51837e8a42631e98e34b9e0892245cc96593deb"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:87bbad3f5c3de8897b8c1263a9af73bbb6469fb90e7b57225dad89b8ef62cd8d"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:25f9e0bc2fe8f41c2711ccefd2871f8a5f50a39e6293b68c3dec576112937aad"}, - {file = "lru_dict-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ae301c282a499dc1968dd633cfef8771dd84228ae9d40002a3ea990e4ff0c469"}, - {file = "lru_dict-1.2.0-cp311-cp311-win32.whl", hash = "sha256:c9617583173a29048e11397f165501edc5ae223504a404b2532a212a71ecc9ed"}, - {file = "lru_dict-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6b7a031e47421d4b7aa626b8c91c180a9f037f89e5d0a71c4bb7afcf4036c774"}, - {file = "lru_dict-1.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ea2ac3f7a7a2f32f194c84d82a034e66780057fd908b421becd2f173504d040e"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cd46c94966f631a81ffe33eee928db58e9fbee15baba5923d284aeadc0e0fa76"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:086ce993414f0b28530ded7e004c77dc57c5748fa6da488602aa6e7f79e6210e"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df25a426446197488a6702954dcc1de511deee20c9db730499a2aa83fddf0df1"}, - {file = "lru_dict-1.2.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c53b12b89bd7a6c79f0536ff0d0a84fdf4ab5f6252d94b24b9b753bd9ada2ddf"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f9484016e6765bd295708cccc9def49f708ce07ac003808f69efa386633affb9"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d0f7ec902a0097ac39f1922c89be9eaccf00eb87751e28915320b4f72912d057"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:981ef3edc82da38d39eb60eae225b88a538d47b90cce2e5808846fd2cf64384b"}, - {file = "lru_dict-1.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e25b2e90a032dc248213af7f3f3e975e1934b204f3b16aeeaeaff27a3b65e128"}, - {file = "lru_dict-1.2.0-cp36-cp36m-win32.whl", hash = "sha256:59f3df78e94e07959f17764e7fa7ca6b54e9296953d2626a112eab08e1beb2db"}, - {file = "lru_dict-1.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:de24b47159e07833aeab517d9cb1c3c5c2d6445cc378b1c2f1d8d15fb4841d63"}, - {file = "lru_dict-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d0dd4cd58220351233002f910e35cc01d30337696b55c6578f71318b137770f9"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a87bdc291718bbdf9ea4be12ae7af26cbf0706fa62c2ac332748e3116c5510a7"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05fb8744f91f58479cbe07ed80ada6696ec7df21ea1740891d4107a8dd99a970"}, - 
{file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f6e8a3fc91481b40395316a14c94daa0f0a5de62e7e01a7d589f8d29224052"}, - {file = "lru_dict-1.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b172fce0a0ffc0fa6d282c14256d5a68b5db1e64719c2915e69084c4b6bf555"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e707d93bae8f0a14e6df1ae8b0f076532b35f00e691995f33132d806a88e5c18"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b9ec7a4a0d6b8297102aa56758434fb1fca276a82ed7362e37817407185c3abb"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f404dcc8172da1f28da9b1f0087009578e608a4899b96d244925c4f463201f2a"}, - {file = "lru_dict-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1171ad3bff32aa8086778be4a3bdff595cc2692e78685bcce9cb06b96b22dcc2"}, - {file = "lru_dict-1.2.0-cp37-cp37m-win32.whl", hash = "sha256:0c316dfa3897fabaa1fe08aae89352a3b109e5f88b25529bc01e98ac029bf878"}, - {file = "lru_dict-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5919dd04446bc1ee8d6ecda2187deeebfff5903538ae71083e069bc678599446"}, - {file = "lru_dict-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbf36c5a220a85187cacc1fcb7dd87070e04b5fc28df7a43f6842f7c8224a388"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:712e71b64da181e1c0a2eaa76cd860265980cd15cb0e0498602b8aa35d5db9f8"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f54908bf91280a9b8fa6a8c8f3c2f65850ce6acae2852bbe292391628ebca42f"}, - {file = "lru_dict-1.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3838e33710935da2ade1dd404a8b936d571e29268a70ff4ca5ba758abb3850df"}, - {file = 
"lru_dict-1.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5d5a5f976b39af73324f2b793862859902ccb9542621856d51a5993064f25e4"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8bda3a9afd241ee0181661decaae25e5336ce513ac268ab57da737eacaa7871f"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd2cd1b998ea4c8c1dad829fc4fa88aeed4dee555b5e03c132fc618e6123f168"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b55753ee23028ba8644fd22e50de7b8f85fa60b562a0fafaad788701d6131ff8"}, - {file = "lru_dict-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e51fa6a203fa91d415f3b2900e5748ec8e06ad75777c98cc3aeb3983ca416d7"}, - {file = "lru_dict-1.2.0-cp38-cp38-win32.whl", hash = "sha256:cd6806313606559e6c7adfa0dbeb30fc5ab625f00958c3d93f84831e7a32b71e"}, - {file = "lru_dict-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d90a70c53b0566084447c3ef9374cc5a9be886e867b36f89495f211baabd322"}, - {file = "lru_dict-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3ea7571b6bf2090a85ff037e6593bbafe1a8598d5c3b4560eb56187bcccb4dc"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:287c2115a59c1c9ed0d5d8ae7671e594b1206c36ea9df2fca6b17b86c468ff99"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5ccfd2291c93746a286c87c3f895165b697399969d24c54804ec3ec559d4e43"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b710f0f4d7ec4f9fa89dfde7002f80bcd77de8024017e70706b0911ea086e2ef"}, - {file = "lru_dict-1.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5345bf50e127bd2767e9fd42393635bbc0146eac01f6baf6ef12c332d1a6a329"}, - {file = 
"lru_dict-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:291d13f85224551913a78fe695cde04cbca9dcb1d84c540167c443eb913603c9"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d5bb41bc74b321789803d45b124fc2145c1b3353b4ad43296d9d1d242574969b"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0facf49b053bf4926d92d8d5a46fe07eecd2af0441add0182c7432d53d6da667"}, - {file = "lru_dict-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:987b73a06bcf5a95d7dc296241c6b1f9bc6cda42586948c9dabf386dc2bef1cd"}, - {file = "lru_dict-1.2.0-cp39-cp39-win32.whl", hash = "sha256:231d7608f029dda42f9610e5723614a35b1fff035a8060cf7d2be19f1711ace8"}, - {file = "lru_dict-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:71da89e134747e20ed5b8ad5b4ee93fc5b31022c2b71e8176e73c5a44699061b"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:21b3090928c7b6cec509e755cc3ab742154b33660a9b433923bd12c37c448e3e"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaecd7085212d0aa4cd855f38b9d61803d6509731138bf798a9594745953245b"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead83ac59a29d6439ddff46e205ce32f8b7f71a6bd8062347f77e232825e3d0a"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:312b6b2a30188586fe71358f0f33e4bac882d33f5e5019b26f084363f42f986f"}, - {file = "lru_dict-1.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b30122e098c80e36d0117810d46459a46313421ce3298709170b687dc1240b02"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f010cfad3ab10676e44dc72a813c968cd586f37b466d27cde73d1f7f1ba158c2"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:20f5f411f7751ad9a2c02e80287cedf69ae032edd321fe696e310d32dd30a1f8"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afdadd73304c9befaed02eb42f5f09fdc16288de0a08b32b8080f0f0f6350aa6"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7ab0c10c4fa99dc9e26b04e6b62ac32d2bcaea3aad9b81ec8ce9a7aa32b7b1b"}, - {file = "lru_dict-1.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:edad398d5d402c43d2adada390dd83c74e46e020945ff4df801166047013617e"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:91d577a11b84387013815b1ad0bb6e604558d646003b44c92b3ddf886ad0f879"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb12f19cdf9c4f2d9aa259562e19b188ff34afab28dd9509ff32a3f1c2c29326"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e4c85aa8844bdca3c8abac3b7f78da1531c74e9f8b3e4890c6e6d86a5a3f6c0"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c6acbd097b15bead4de8e83e8a1030bb4d8257723669097eac643a301a952f0"}, - {file = "lru_dict-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b6613daa851745dd22b860651de930275be9d3e9373283a2164992abacb75b62"}, -] - -[package.extras] -test = ["pytest"] - -[[package]] -name = "mako" -version = "1.3.9" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"}, - {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" +mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] @@ -1797,6 +2010,8 @@ version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -1860,12 +2075,85 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + [[package]] name = "mccabe" version = "0.6.1" description = "McCabe checker, plugin for flake8" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = 
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -1877,17 +2165,52 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mirakuru" +version = "2.5.2" +description = "Process executor (not only) for tests." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" +files = [ + {file = "mirakuru-2.5.2-py3-none-any.whl", hash = "sha256:90c2d90a8cf14349b2f33e6db30a16acd855499811e0312e56cf80ceacf2d3e5"}, + {file = "mirakuru-2.5.2.tar.gz", hash = "sha256:41ca583d355eb7a6cfdc21c1aea549979d685c27b57239b88725434f115a7132"}, +] + +[package.dependencies] +psutil = {version = ">=4.0.0", markers = "sys_platform != \"cygwin\""} + +[[package]] +name = "mirakuru" +version = "2.6.0" +description = "Process executor (not only) for tests." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "mirakuru-2.6.0-py3-none-any.whl", hash = "sha256:0ff7080997e63289dc309d0237e137ca2cfa863b3d26b3d5e8fd4e1c2b2ef659"}, + {file = "mirakuru-2.6.0.tar.gz", hash = "sha256:3256fcf81ef090a30be97a8ce50ff0c178292d7e542866c5fedc5ae6801e3a17"}, +] + +[package.dependencies] +psutil = {version = ">=4.0.0", markers = "sys_platform != \"cygwin\""} + [[package]] name = "mmh3" version = "5.0.1" description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa"}, {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6"}, @@ -1995,12 +2318,113 @@ plot = ["matplotlib (==3.9.2)", "pandas (==2.2.2)"] test = ["pytest (==8.3.3)", "pytest-sugar (==1.0.0)"] type = ["mypy (==1.11.2)"] +[[package]] +name = "mmh3" +version = "5.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a"}, + {file = "mmh3-5.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4ba8cac21e1f2d4e436ce03a82a7f87cda80378691f760e9ea55045ec480a3d"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69281c281cb01994f054d862a6bb02a2e7acfe64917795c58934b0872b9ece4"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d05ed3962312fbda2a1589b97359d2467f677166952f6bd410d8c916a55febf"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ae6a03f4cff4aa92ddd690611168856f8c33a141bd3e5a1e0a85521dc21ea0"}, + {file = "mmh3-5.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f983535b39795d9fb7336438faae117424c6798f763d67c6624f6caf2c4c01"}, + {file = 
"mmh3-5.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d46fdd80d4c7ecadd9faa6181e92ccc6fe91c50991c9af0e371fdf8b8a7a6150"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0f16e976af7365ea3b5c425124b2a7f0147eed97fdbb36d99857f173c8d8e096"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6fa97f7d1e1f74ad1565127229d510f3fd65d931fdedd707c1e15100bc9e5ebb"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4052fa4a8561bd62648e9eb993c8f3af3bdedadf3d9687aa4770d10e3709a80c"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3f0e8ae9f961037f812afe3cce7da57abf734285961fffbeff9a4c011b737732"}, + {file = "mmh3-5.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:99297f207db967814f1f02135bb7fe7628b9eacb046134a34e1015b26b06edce"}, + {file = "mmh3-5.1.0-cp310-cp310-win32.whl", hash = "sha256:2e6c8dc3631a5e22007fbdb55e993b2dbce7985c14b25b572dd78403c2e79182"}, + {file = "mmh3-5.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:e4e8c7ad5a4dddcfde35fd28ef96744c1ee0f9d9570108aa5f7e77cf9cfdf0bf"}, + {file = "mmh3-5.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:45da549269883208912868a07d0364e1418d8292c4259ca11699ba1b2475bd26"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7"}, + {file = "mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2"}, + {file = "mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a"}, + {file = "mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258"}, + {file = "mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372"}, + {file = "mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759"}, + {file = "mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae"}, + {file = "mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83"}, + {file = "mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b"}, + {file = "mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df"}, + {file = "mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76"}, + {file = 
"mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776"}, + {file = "mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a523899ca29cfb8a5239618474a435f3d892b22004b91779fcb83504c0d5b8c"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:17cef2c3a6ca2391ca7171a35ed574b5dab8398163129a3e3a4c05ab85a4ff40"}, + {file = "mmh3-5.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:52e12895b30110f3d89dae59a888683cc886ed0472dd2eca77497edef6161997"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d6719045cda75c3f40397fc24ab67b18e0cb8f69d3429ab4c39763c4c608dd"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19fa07d303a91f8858982c37e6939834cb11893cb3ff20e6ee6fa2a7563826a"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31b47a620d622fbde8ca1ca0435c5d25de0ac57ab507209245e918128e38e676"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00f810647c22c179b6821079f7aa306d51953ac893587ee09cf1afb35adf87cb"}, + {file = "mmh3-5.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6128b610b577eed1e89ac7177ab0c33d06ade2aba93f5c89306032306b5f1c6"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1e550a45d2ff87a1c11b42015107f1778c93f4c6f8e731bf1b8fa770321b8cc4"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:785ae09276342f79fd8092633e2d52c0f7c44d56e8cfda8274ccc9b76612dba2"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:0f4be3703a867ef976434afd3661a33884abe73ceb4ee436cac49d3b4c2aaa7b"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e513983830c4ff1f205ab97152a0050cf7164f1b4783d702256d39c637b9d107"}, + {file = "mmh3-5.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9135c300535c828c0bae311b659f33a31c941572eae278568d1a953c4a57b59"}, + {file = "mmh3-5.1.0-cp313-cp313-win32.whl", hash = "sha256:c65dbd12885a5598b70140d24de5839551af5a99b29f9804bb2484b29ef07692"}, + {file = "mmh3-5.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:10db7765201fc65003fa998faa067417ef6283eb5f9bba8f323c48fd9c33e91f"}, + {file = "mmh3-5.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:b22fe2e54be81f6c07dcb36b96fa250fb72effe08aa52fbb83eade6e1e2d5fd7"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:166b67749a1d8c93b06f5e90576f1ba838a65c8e79f28ffd9dfafba7c7d0a084"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adba83c7ba5cc8ea201ee1e235f8413a68e7f7b8a657d582cc6c6c9d73f2830e"}, + {file = "mmh3-5.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a61f434736106804eb0b1612d503c4e6eb22ba31b16e6a2f987473de4226fa55"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba9ce59816b30866093f048b3312c2204ff59806d3a02adee71ff7bd22b87554"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd51597bef1e503363b05cb579db09269e6e6c39d419486626b255048daf545b"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d51a1ed642d3fb37b8f4cab966811c52eb246c3e1740985f701ef5ad4cdd2145"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:709bfe81c53bf8a3609efcbd65c72305ade60944f66138f697eefc1a86b6e356"}, + {file = "mmh3-5.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e01a9b0092b6f82e861137c8e9bb9899375125b24012eb5219e61708be320032"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:27e46a2c13c9a805e03c9ec7de0ca8e096794688ab2125bdce4229daf60c4a56"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5766299c1d26f6bfd0a638e070bd17dbd98d4ccb067d64db3745bf178e700ef0"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7785205e3e4443fdcbb73766798c7647f94c2f538b90f666688f3e757546069e"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8e574fbd39afb433b3ab95683b1b4bf18313dc46456fc9daaddc2693c19ca565"}, + {file = "mmh3-5.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1b6727a5a20e32cbf605743749f3862abe5f5e097cbf2afc7be5aafd32a549ae"}, + {file = "mmh3-5.1.0-cp39-cp39-win32.whl", hash = "sha256:d6eaa711d4b9220fe5252032a44bf68e5dcfb7b21745a96efc9e769b0dd57ec2"}, + {file = "mmh3-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:49d444913f6c02980e5241a53fe9af2338f2043d6ce5b6f5ea7d302c52c604ac"}, + {file = "mmh3-5.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:0daaeaedd78773b70378f2413c7d6b10239a75d955d30d54f460fb25d599942d"}, + {file = "mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c"}, +] + +[package.extras] +benchmark = ["pymmh3 (==0.0.5)", "pyperf (==2.8.1)", "xxhash (==3.5.0)"] +docs = ["myst-parser (==4.0.0)", "shibuya (==2024.12.21)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)"] +lint = ["black (==24.10.0)", "clang-format (==19.1.7)", "isort (==5.13.2)", "pylint (==3.3.3)"] +plot = ["matplotlib (==3.10.0)", "pandas (==2.2.3)"] +test = ["pytest (==8.3.4)", "pytest-sugar (==1.0.0)"] +type = ["mypy (==1.14.1)"] + [[package]] name = "mpire" version = "2.10.2" description = "A Python package for easy multiprocessing, but faster than multiprocessing" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "mpire-2.10.2-py3-none-any.whl", hash = 
"sha256:d627707f7a8d02aa4c7f7d59de399dec5290945ddf7fbd36cbb1d6ebb37a51fb"}, {file = "mpire-2.10.2.tar.gz", hash = "sha256:f66a321e93fadff34585a4bfa05e95bd946cf714b442f51c529038eb45773d97"}, @@ -2014,9 +2438,9 @@ tqdm = ">=4.27" [package.extras] dashboard = ["flask"] -dill = ["multiprocess", "multiprocess (>=0.70.15)"] +dill = ["multiprocess (>=0.70.15) ; python_version >= \"3.11\"", "multiprocess ; python_version < \"3.11\""] docs = ["docutils (==0.17.1)", "sphinx (==3.2.1)", "sphinx-autodoc-typehints (==1.11.0)", "sphinx-rtd-theme (==0.5.0)", "sphinx-versions (==1.0.1)", "sphinxcontrib-images (==0.9.2)"] -testing = ["ipywidgets", "multiprocess", "multiprocess (>=0.70.15)", "numpy", "pywin32 (>=301)", "rich"] +testing = ["ipywidgets", "multiprocess (>=0.70.15) ; python_version >= \"3.11\"", "multiprocess ; python_version < \"3.11\"", "numpy", "pywin32 (>=301) ; platform_system == \"Windows\"", "rich"] [[package]] name = "multidict" @@ -2024,6 +2448,7 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -2128,6 +2553,7 @@ version = "0.70.17" description = "better multiprocessing and multithreading in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "multiprocess-0.70.17-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ddb24e5bcdb64e90ec5543a1f05a39463068b6d3b804aa3f2a4e16ec28562d6"}, {file = "multiprocess-0.70.17-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d729f55198a3579f6879766a6d9b72b42d4b320c0dcb7844afb774d75b573c62"}, @@ -2156,6 +2582,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the 
mypy type checker." optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -2167,6 +2594,7 @@ version = "1.24.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, @@ -2204,6 +2632,7 @@ version = "3.1.3" description = "A Python library to read/write Excel 2010 xlsx/xlsm files" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "openpyxl-3.1.3-py2.py3-none-any.whl", hash = "sha256:25071b558db709de9e8782c3d3e058af3b23ffb2fc6f40c8f0c45a154eced2c3"}, {file = "openpyxl-3.1.3.tar.gz", hash = "sha256:8dd482e5350125b2388070bb2477927be2e8ebc27df61178709bc8c8751da2f9"}, @@ -2212,26 +2641,13 @@ files = [ [package.dependencies] et-xmlfile = "*" -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - [[package]] name = "orjson" version = "3.10.7" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false 
python-versions = ">=3.8" +groups = ["main"] files = [ {file = "orjson-3.10.7-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:74f4544f5a6405b90da8ea724d15ac9c36da4d72a738c64685003337401f5c12"}, {file = "orjson-3.10.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34a566f22c28222b08875b18b0dfbf8a947e69df21a9ed5c51a6bf91cfb944ac"}, @@ -2298,6 +2714,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -2309,6 +2726,7 @@ version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, @@ -2343,7 +2761,7 @@ files = [ numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.21.0", markers = "python_version == \"3.10\""}, ] python-dateutil = ">=2.8.1" pytz = ">=2020.1" @@ -2357,6 +2775,7 @@ version = "0.10.0" description = "(Soon to be) the fastest pure-Python PEG parser I could muster" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "parsimonious-0.10.0-py3-none-any.whl", hash = 
"sha256:982ab435fabe86519b57f6b35610aa4e4e977e9f02a14353edf4bbc75369fc0f"}, {file = "parsimonious-0.10.0.tar.gz", hash = "sha256:8281600da180ec8ae35427a4ab4f7b82bfec1e3d1e52f80cb60ea82b9512501c"}, @@ -2371,28 +2790,19 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -2409,6 +2819,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2418,12 +2829,39 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "port-for" +version = "0.7.3" +description = "Utility that helps with local TCP ports management. It can find an unused TCP localhost port and remember the association." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" +files = [ + {file = "port_for-0.7.3-py3-none-any.whl", hash = "sha256:786fa1171cee23093a475d65228b4a9877d249827ceb7cd2362cb7b80d0c69d4"}, + {file = "port_for-0.7.3.tar.gz", hash = "sha256:2d597e5854a1b323b17eba8ae0630784c779857abde5e22444c88d233a60f953"}, +] + +[[package]] +name = "port-for" +version = "0.7.4" +description = "Utility that helps with local TCP ports management. It can find an unused TCP localhost port and remember the association." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "port_for-0.7.4-py3-none-any.whl", hash = "sha256:08404aa072651a53dcefe8d7a598ee8a1dca320d9ac44ac464da16ccf2a02c4a"}, + {file = "port_for-0.7.4.tar.gz", hash = "sha256:fc7713e7b22f89442f335ce12536653656e8f35146739eccaeff43d28436028d"}, +] + [[package]] name = "pottery" version = "3.0.0" description = "Redis for Humans." optional = false python-versions = ">=3.7, <4" +groups = ["main"] files = [ {file = "pottery-3.0.0-py3-none-any.whl", hash = "sha256:0190323bbb1289d40c5cd683feb04c4b8cff76a6c723f3ded9137c8bcc9fb5f8"}, {file = "pottery-3.0.0.tar.gz", hash = "sha256:adda303e9357442bcac1d4c7f86aa7deec855e0190c101d09448afbcf5676a74"}, @@ -2440,6 +2878,7 @@ version = "0.21.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, @@ -2454,6 +2893,8 @@ version = "0.2.0" description = "Accelerated property cache" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, @@ -2556,31 +2997,171 @@ files = [ ] [[package]] -name = "protobuf" -version = "5.29.3" -description = "" +name = "propcache" +version = "0.3.0" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + 
{file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"}, + {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"}, + {file = "propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f"}, + {file = "propcache-0.3.0-cp310-cp310-win32.whl", hash = "sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c"}, + {file = "propcache-0.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340"}, + {file = "propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51"}, + {file = "propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e"}, + {file = "propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075"}, + {file = 
"propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c"}, + {file = "propcache-0.3.0-cp311-cp311-win32.whl", hash = "sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d"}, + {file = "propcache-0.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32"}, + {file = "propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e"}, + {file = "propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af"}, + {file = "propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7"}, + {file = 
"propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c"}, + {file = "propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d"}, + {file = "propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57"}, + {file = "propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568"}, + {file = "propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9"}, + {file = "propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767"}, + {file = 
"propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626"}, + {file = "propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374"}, + {file = "propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a"}, + {file = "propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf"}, + {file = "propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0"}, + {file = "propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf"}, + {file = "propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863"}, + {file = "propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46"}, + {file = "propcache-0.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:03c091bb752349402f23ee43bb2bff6bd80ccab7c9df6b88ad4322258d6960fc"}, + {file = "propcache-0.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46ed02532cb66612d42ae5c3929b5e98ae330ea0f3900bc66ec5f4862069519b"}, + {file = "propcache-0.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11ae6a8a01b8a4dc79093b5d3ca2c8a4436f5ee251a9840d7790dccbd96cb649"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df03cd88f95b1b99052b52b1bb92173229d7a674df0ab06d2b25765ee8404bce"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03acd9ff19021bd0567582ac88f821b66883e158274183b9e5586f678984f8fe"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd54895e4ae7d32f1e3dd91261df46ee7483a735017dc6f987904f194aa5fd14"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a67e5c04e3119594d8cfae517f4b9330c395df07ea65eab16f3d559b7068fe"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee25f1ac091def37c4b59d192bbe3a206298feeb89132a470325bf76ad122a1e"}, + {file = 
"propcache-0.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58e6d2a5a7cb3e5f166fd58e71e9a4ff504be9dc61b88167e75f835da5764d07"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:be90c94570840939fecedf99fa72839aed70b0ced449b415c85e01ae67422c90"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49ea05212a529c2caffe411e25a59308b07d6e10bf2505d77da72891f9a05641"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:119e244ab40f70a98c91906d4c1f4c5f2e68bd0b14e7ab0a06922038fae8a20f"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:507c5357a8d8b4593b97fb669c50598f4e6cccbbf77e22fa9598aba78292b4d7"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8526b0941ec5a40220fc4dfde76aed58808e2b309c03e9fa8e2260083ef7157f"}, + {file = "propcache-0.3.0-cp39-cp39-win32.whl", hash = "sha256:7cedd25e5f678f7738da38037435b340694ab34d424938041aa630d8bac42663"}, + {file = "propcache-0.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:bf4298f366ca7e1ad1d21bbb58300a6985015909964077afd37559084590c929"}, + {file = "propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043"}, + {file = "propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5"}, +] + +[[package]] +name = "psutil" +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +markers = "sys_platform != \"cygwin\"" +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +] + +[package.extras] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] + +[[package]] +name = 
"psycopg" +version = "3.2.5" +description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, - {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, - {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"}, - {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"}, - {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"}, - {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"}, - {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"}, - {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"}, - {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"}, - {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"}, - {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"}, + {file = "psycopg-3.2.5-py3-none-any.whl", hash = "sha256:b782130983e5b3de30b4c529623d3687033b4dafa05bb661fc6bf45837ca5879"}, + {file = "psycopg-3.2.5.tar.gz", hash = "sha256:f5f750611c67cb200e85b408882f29265c66d1de7f813add4f8125978bfd70e8"}, ] +[package.dependencies] +"backports.zoneinfo" = {version = ">=0.2.0", markers = "python_version < 
\"3.9\""} +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.2.5) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.5) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + [[package]] name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, @@ -2656,33 +3237,13 @@ files = [ {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] -[[package]] -name = "py-ecc" -version = "7.0.1" -description = "py-ecc: Elliptic curve crypto in python including secp256k1, alt_bn128, and bls12_381" -optional = false -python-versions = "<4,>=3.8" -files = [ - {file = "py_ecc-7.0.1-py3-none-any.whl", hash = "sha256:84a8b4d436163c83c65345a68e32f921ef6e64374a36f8e561f0455b4b08f5f2"}, - {file = "py_ecc-7.0.1.tar.gz", hash = "sha256:557461f42e57294d734305a30faf6b8903421651871e9cdeff8d8e67c6796c70"}, -] - -[package.dependencies] -cached-property = ">=1.5.1" -eth-typing = 
">=3.0.0" -eth-utils = ">=2.0.0" - -[package.extras] -dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] -docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] - [[package]] name = "pycodestyle" version = "2.7.0" description = "Python style guide checker" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, @@ -2694,6 +3255,7 @@ version = "3.21.0" description = "Cryptographic library for Python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, @@ -2729,12 +3291,168 @@ files = [ {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, ] +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = 
"sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = 
"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = 
"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file 
= "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.8.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"}, + {file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pyflakes" version = "2.3.1" description = "passive checker of Python programs" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, {file = "pyflakes-2.3.1.tar.gz", hash = 
"sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, @@ -2742,27 +3460,67 @@ files = [ [[package]] name = "pygments" -version = "2.19.1" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." optional = false +python-versions = ">=3.6" +groups = ["main", "dev"] +files = [ + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +] + +[package.extras] +plugins = ["importlib-metadata ; python_version < \"3.8\""] + +[[package]] +name = "pyjwt" +version = "2.9.0" +description = "JSON Web Token implementation in Python" +optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = 
"sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.extras] -windows-terminal = ["colorama (>=0.4.6)"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -2782,6 +3540,8 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -2794,12 +3554,73 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-cov" +version = "6.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-postgresql" +version = "6.1.1" +description = "Postgresql fixtures and fixture factories for Pytest." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" +files = [ + {file = "pytest_postgresql-6.1.1-py3-none-any.whl", hash = "sha256:bd4c0970d25685ac3d34d42263fcbfbf134bf02d22519fce7e1ccf4122d8b99a"}, + {file = "pytest_postgresql-6.1.1.tar.gz", hash = "sha256:f996637367e6aecebba1349da52eea95340bdb434c90e4b79739e62c656056e2"}, +] + +[package.dependencies] +mirakuru = "*" +port-for = ">=0.7.3" +psycopg = ">=3.0.0" +pytest = ">=6.2" +setuptools = "*" + +[[package]] +name = "pytest-postgresql" +version = "7.0.0" +description = "Postgresql fixtures and fixture factories for Pytest." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "pytest_postgresql-7.0.0-py3-none-any.whl", hash = "sha256:aaebadbf060b85cca7755fdf5ed7aa2929edd0f842c9b7f56ffe1e58e0d3b749"}, + {file = "pytest_postgresql-7.0.0.tar.gz", hash = "sha256:cf0016cee5d9ac06f50cfc61bb0597d1fa90780d77c4453bc18e4930cae04aaa"}, +] + +[package.dependencies] +mirakuru = ">=2.6.0" +packaging = "*" +port-for = ">=0.7.3" +psycopg = ">=3.0.0" +pytest = ">=6.2" + [[package]] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2808,12 +3629,40 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.20" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, + {file = "python_multipart-0.0.20.tar.gz", hash = 
"sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, +] + [[package]] name = "pytz" version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -2825,6 +3674,7 @@ version = "16.0.0" description = "Unicode normalization forms (NFC, NFKC, NFD, NFKD). A library independent of the Python core Unicode database." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "pyunormalize-16.0.0-py3-none-any.whl", hash = "sha256:c647d95e5d1e2ea9a2f448d1d95d8518348df24eab5c3fd32d2b5c3300a49152"}, {file = "pyunormalize-16.0.0.tar.gz", hash = "sha256:2e1dfbb4a118154ae26f70710426a52a364b926c9191f764601f5a8cb12761f7"}, @@ -2836,6 +3686,8 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -2863,6 +3715,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2925,6 +3778,7 @@ version = "4.6.0" description = "Python client for Redis database and key-value 
store" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, @@ -2937,27 +3791,13 @@ async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2 hiredis = ["hiredis (>=1.0.0)"] ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - [[package]] name = "regex" version = "2024.11.6" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -3061,6 +3901,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -3082,6 +3923,7 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -3095,12 +3937,30 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.1 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rich-toolkit" +version = "0.13.2" +description = "Rich toolkit for building command-line applications" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "rich_toolkit-0.13.2-py3-none-any.whl", hash = "sha256:f3f6c583e5283298a2f7dbd3c65aca18b7f818ad96174113ab5bec0b0e35ed61"}, + {file = "rich_toolkit-0.13.2.tar.gz", hash = "sha256:fea92557530de7c28f121cbed572ad93d9e0ddc60c3ca643f1b831f2f56b95d3"}, +] + +[package.dependencies] +click = ">=8.1.7" +rich = ">=13.7.1" +typing-extensions = ">=4.12.2" + [[package]] name = "rlp" version = "4.1.0" description = "rlp: A package for Recursive Length Prefix encoding and decoding" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ {file = "rlp-4.1.0-py3-none-any.whl", hash = "sha256:8eca394c579bad34ee0b937aecb96a57052ff3716e19c7a578883e767bc5da6f"}, {file = "rlp-4.1.0.tar.gz", hash = "sha256:be07564270a96f3e225e2c107db263de96b5bc1f27722d2855bd3459a08e95a9"}, @@ -3115,124 +3975,13 @@ docs = ["sphinx (>=6.0.0)", "sphinx-autobuild 
(>=2021.3.14)", "sphinx_rtd_theme rust-backend = ["rusty-rlp (>=0.2.1)"] test = ["hypothesis (>=6.22.0,<6.108.7)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] -[[package]] -name = "rpds-py" -version = "0.20.1" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "rpds_py-0.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a649dfd735fff086e8a9d0503a9f0c7d01b7912a333c7ae77e1515c08c146dad"}, - {file = "rpds_py-0.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f16bc1334853e91ddaaa1217045dd7be166170beec337576818461268a3de67f"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14511a539afee6f9ab492b543060c7491c99924314977a55c98bfa2ee29ce78c"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ccb8ac2d3c71cda472b75af42818981bdacf48d2e21c36331b50b4f16930163"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c142b88039b92e7e0cb2552e8967077e3179b22359e945574f5e2764c3953dcf"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f19169781dddae7478a32301b499b2858bc52fc45a112955e798ee307e294977"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13c56de6518e14b9bf6edde23c4c39dac5b48dcf04160ea7bce8fca8397cdf86"}, - {file = "rpds_py-0.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:925d176a549f4832c6f69fa6026071294ab5910e82a0fe6c6228fce17b0706bd"}, - {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78f0b6877bfce7a3d1ff150391354a410c55d3cdce386f862926a4958ad5ab7e"}, - {file = "rpds_py-0.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3dd645e2b0dcb0fd05bf58e2e54c13875847687d0b71941ad2e757e5d89d4356"}, - {file = 
"rpds_py-0.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4f676e21db2f8c72ff0936f895271e7a700aa1f8d31b40e4e43442ba94973899"}, - {file = "rpds_py-0.20.1-cp310-none-win32.whl", hash = "sha256:648386ddd1e19b4a6abab69139b002bc49ebf065b596119f8f37c38e9ecee8ff"}, - {file = "rpds_py-0.20.1-cp310-none-win_amd64.whl", hash = "sha256:d9ecb51120de61e4604650666d1f2b68444d46ae18fd492245a08f53ad2b7711"}, - {file = "rpds_py-0.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:762703bdd2b30983c1d9e62b4c88664df4a8a4d5ec0e9253b0231171f18f6d75"}, - {file = "rpds_py-0.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0b581f47257a9fce535c4567782a8976002d6b8afa2c39ff616edf87cbeff712"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:842c19a6ce894493563c3bd00d81d5100e8e57d70209e84d5491940fdb8b9e3a"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42cbde7789f5c0bcd6816cb29808e36c01b960fb5d29f11e052215aa85497c93"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c8e9340ce5a52f95fa7d3b552b35c7e8f3874d74a03a8a69279fd5fca5dc751"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ba6f89cac95c0900d932c9efb7f0fb6ca47f6687feec41abcb1bd5e2bd45535"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a916087371afd9648e1962e67403c53f9c49ca47b9680adbeef79da3a7811b0"}, - {file = "rpds_py-0.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:200a23239781f46149e6a415f1e870c5ef1e712939fe8fa63035cd053ac2638e"}, - {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:58b1d5dd591973d426cbb2da5e27ba0339209832b2f3315928c9790e13f159e8"}, - {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:6b73c67850ca7cae0f6c56f71e356d7e9fa25958d3e18a64927c2d930859b8e4"}, - {file = "rpds_py-0.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8761c3c891cc51e90bc9926d6d2f59b27beaf86c74622c8979380a29cc23ac3"}, - {file = "rpds_py-0.20.1-cp311-none-win32.whl", hash = "sha256:cd945871335a639275eee904caef90041568ce3b42f402c6959b460d25ae8732"}, - {file = "rpds_py-0.20.1-cp311-none-win_amd64.whl", hash = "sha256:7e21b7031e17c6b0e445f42ccc77f79a97e2687023c5746bfb7a9e45e0921b84"}, - {file = "rpds_py-0.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:36785be22066966a27348444b40389f8444671630063edfb1a2eb04318721e17"}, - {file = "rpds_py-0.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:142c0a5124d9bd0e2976089484af5c74f47bd3298f2ed651ef54ea728d2ea42c"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbddc10776ca7ebf2a299c41a4dde8ea0d8e3547bfd731cb87af2e8f5bf8962d"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15a842bb369e00295392e7ce192de9dcbf136954614124a667f9f9f17d6a216f"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be5ef2f1fc586a7372bfc355986226484e06d1dc4f9402539872c8bb99e34b01"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbcf360c9e3399b056a238523146ea77eeb2a596ce263b8814c900263e46031a"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd27a66740ffd621d20b9a2f2b5ee4129a56e27bfb9458a3bcc2e45794c96cb"}, - {file = "rpds_py-0.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0b937b2a1988f184a3e9e577adaa8aede21ec0b38320d6009e02bd026db04fa"}, - {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6889469bfdc1eddf489729b471303739bf04555bb151fe8875931f8564309afc"}, - {file = 
"rpds_py-0.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19b73643c802f4eaf13d97f7855d0fb527fbc92ab7013c4ad0e13a6ae0ed23bd"}, - {file = "rpds_py-0.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c6afcf2338e7f374e8edc765c79fbcb4061d02b15dd5f8f314a4af2bdc7feb5"}, - {file = "rpds_py-0.20.1-cp312-none-win32.whl", hash = "sha256:dc73505153798c6f74854aba69cc75953888cf9866465196889c7cdd351e720c"}, - {file = "rpds_py-0.20.1-cp312-none-win_amd64.whl", hash = "sha256:8bbe951244a838a51289ee53a6bae3a07f26d4e179b96fc7ddd3301caf0518eb"}, - {file = "rpds_py-0.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6ca91093a4a8da4afae7fe6a222c3b53ee4eef433ebfee4d54978a103435159e"}, - {file = "rpds_py-0.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b9c2fe36d1f758b28121bef29ed1dee9b7a2453e997528e7d1ac99b94892527c"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f009c69bc8c53db5dfab72ac760895dc1f2bc1b62ab7408b253c8d1ec52459fc"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6740a3e8d43a32629bb9b009017ea5b9e713b7210ba48ac8d4cb6d99d86c8ee8"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32b922e13d4c0080d03e7b62991ad7f5007d9cd74e239c4b16bc85ae8b70252d"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe00a9057d100e69b4ae4a094203a708d65b0f345ed546fdef86498bf5390982"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fe9b04b6fa685bd39237d45fad89ba19e9163a1ccaa16611a812e682913496"}, - {file = "rpds_py-0.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa7ac11e294304e615b43f8c441fee5d40094275ed7311f3420d805fde9b07b4"}, - {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:6aa97af1558a9bef4025f8f5d8c60d712e0a3b13a2fe875511defc6ee77a1ab7"}, - {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:483b29f6f7ffa6af845107d4efe2e3fa8fb2693de8657bc1849f674296ff6a5a"}, - {file = "rpds_py-0.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:37fe0f12aebb6a0e3e17bb4cd356b1286d2d18d2e93b2d39fe647138458b4bcb"}, - {file = "rpds_py-0.20.1-cp313-none-win32.whl", hash = "sha256:a624cc00ef2158e04188df5e3016385b9353638139a06fb77057b3498f794782"}, - {file = "rpds_py-0.20.1-cp313-none-win_amd64.whl", hash = "sha256:b71b8666eeea69d6363248822078c075bac6ed135faa9216aa85f295ff009b1e"}, - {file = "rpds_py-0.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5b48e790e0355865197ad0aca8cde3d8ede347831e1959e158369eb3493d2191"}, - {file = "rpds_py-0.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3e310838a5801795207c66c73ea903deda321e6146d6f282e85fa7e3e4854804"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249280b870e6a42c0d972339e9cc22ee98730a99cd7f2f727549af80dd5a963"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e79059d67bea28b53d255c1437b25391653263f0e69cd7dec170d778fdbca95e"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b431c777c9653e569986ecf69ff4a5dba281cded16043d348bf9ba505486f36"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da584ff96ec95e97925174eb8237e32f626e7a1a97888cdd27ee2f1f24dd0ad8"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a0629ec053fc013808a85178524e3cb63a61dbc35b22499870194a63578fb9"}, - {file = "rpds_py-0.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fbf15aff64a163db29a91ed0868af181d6f68ec1a3a7d5afcfe4501252840bad"}, - {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:07924c1b938798797d60c6308fa8ad3b3f0201802f82e4a2c41bb3fafb44cc28"}, - {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4a5a844f68776a7715ecb30843b453f07ac89bad393431efbf7accca3ef599c1"}, - {file = "rpds_py-0.20.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:518d2ca43c358929bf08f9079b617f1c2ca6e8848f83c1225c88caeac46e6cbc"}, - {file = "rpds_py-0.20.1-cp38-none-win32.whl", hash = "sha256:3aea7eed3e55119635a74bbeb80b35e776bafccb70d97e8ff838816c124539f1"}, - {file = "rpds_py-0.20.1-cp38-none-win_amd64.whl", hash = "sha256:7dca7081e9a0c3b6490a145593f6fe3173a94197f2cb9891183ef75e9d64c425"}, - {file = "rpds_py-0.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b41b6321805c472f66990c2849e152aff7bc359eb92f781e3f606609eac877ad"}, - {file = "rpds_py-0.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a90c373ea2975519b58dece25853dbcb9779b05cc46b4819cb1917e3b3215b6"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d4477bcb9fbbd7b5b0e4a5d9b493e42026c0bf1f06f723a9353f5153e75d30"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84b8382a90539910b53a6307f7c35697bc7e6ffb25d9c1d4e998a13e842a5e83"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4888e117dd41b9d34194d9e31631af70d3d526efc363085e3089ab1a62c32ed1"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5265505b3d61a0f56618c9b941dc54dc334dc6e660f1592d112cd103d914a6db"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e75ba609dba23f2c95b776efb9dd3f0b78a76a151e96f96cc5b6b1b0004de66f"}, - {file = "rpds_py-0.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1791ff70bc975b098fe6ecf04356a10e9e2bd7dc21fa7351c1742fdeb9b4966f"}, - {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:d126b52e4a473d40232ec2052a8b232270ed1f8c9571aaf33f73a14cc298c24f"}, - {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c14937af98c4cc362a1d4374806204dd51b1e12dded1ae30645c298e5a5c4cb1"}, - {file = "rpds_py-0.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3d089d0b88996df627693639d123c8158cff41c0651f646cd8fd292c7da90eaf"}, - {file = "rpds_py-0.20.1-cp39-none-win32.whl", hash = "sha256:653647b8838cf83b2e7e6a0364f49af96deec64d2a6578324db58380cff82aca"}, - {file = "rpds_py-0.20.1-cp39-none-win_amd64.whl", hash = "sha256:fa41a64ac5b08b292906e248549ab48b69c5428f3987b09689ab2441f267d04d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a07ced2b22f0cf0b55a6a510078174c31b6d8544f3bc00c2bcee52b3d613f74"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68cb0a499f2c4a088fd2f521453e22ed3527154136a855c62e148b7883b99f9a"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3060d885657abc549b2a0f8e1b79699290e5d83845141717c6c90c2df38311"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95f3b65d2392e1c5cec27cff08fdc0080270d5a1a4b2ea1d51d5f4a2620ff08d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cc3712a4b0b76a1d45a9302dd2f53ff339614b1c29603a911318f2357b04dd2"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d4eea0761e37485c9b81400437adb11c40e13ef513375bbd6973e34100aeb06"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f5179583d7a6cdb981151dd349786cbc318bab54963a192692d945dd3f6435d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fbb0ffc754490aff6dabbf28064be47f0f9ca0b9755976f945214965b3ace7e"}, - 
{file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a94e52537a0e0a85429eda9e49f272ada715506d3b2431f64b8a3e34eb5f3e75"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:92b68b79c0da2a980b1c4197e56ac3dd0c8a149b4603747c4378914a68706979"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:93da1d3db08a827eda74356f9f58884adb254e59b6664f64cc04cdff2cc19b0d"}, - {file = "rpds_py-0.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:754bbed1a4ca48479e9d4182a561d001bbf81543876cdded6f695ec3d465846b"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ca449520e7484534a2a44faf629362cae62b660601432d04c482283c47eaebab"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9c4cb04a16b0f199a8c9bf807269b2f63b7b5b11425e4a6bd44bd6961d28282c"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb63804105143c7e24cee7db89e37cb3f3941f8e80c4379a0b355c52a52b6780"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55cd1fa4ecfa6d9f14fbd97ac24803e6f73e897c738f771a9fe038f2f11ff07c"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f8f741b6292c86059ed175d80eefa80997125b7c478fb8769fd9ac8943a16c0"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fc212779bf8411667234b3cdd34d53de6c2b8b8b958e1e12cb473a5f367c338"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ad56edabcdb428c2e33bbf24f255fe2b43253b7d13a2cdbf05de955217313e6"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a3a1e9ee9728b2c1734f65d6a1d376c6f2f6fdcc13bb007a08cc4b1ff576dc5"}, - {file = 
"rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e13de156137b7095442b288e72f33503a469aa1980ed856b43c353ac86390519"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:07f59760ef99f31422c49038964b31c4dfcfeb5d2384ebfc71058a7c9adae2d2"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:59240685e7da61fb78f65a9f07f8108e36a83317c53f7b276b4175dc44151684"}, - {file = "rpds_py-0.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:83cba698cfb3c2c5a7c3c6bac12fe6c6a51aae69513726be6411076185a8b24a"}, - {file = "rpds_py-0.20.1.tar.gz", hash = "sha256:e1791c4aabd117653530dccd24108fa03cc6baf21f58b950d0a73c3b3b29a350"}, -] - [[package]] name = "ruff" version = "0.0.235" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.0.235-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:50327fe28aa914c4b2e3d06c3e41f47bcfbd595843a26f5f7fda30ca5318755f"}, {file = "ruff-0.0.235-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:d29966029ff77a1c336004ff3e1effd33db8554ad9ec9f87ff339d0f3d44ae35"}, @@ -3252,23 +4001,120 @@ files = [ {file = "ruff-0.0.235.tar.gz", hash = "sha256:270c0c83c01d00370851813edfd1502f2146a0a0b4e75b723e0c388252840f5a"}, ] +[[package]] +name = "sentry-sdk" +version = "1.45.1" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sentry_sdk-1.45.1-py2.py3-none-any.whl", hash = "sha256:608887855ccfe39032bfd03936e3a1c4f4fc99b3a4ac49ced54a4220de61c9c1"}, + {file = "sentry_sdk-1.45.1.tar.gz", hash = "sha256:a16c997c0f4e3df63c0fc5e4207ccb1ab37900433e0f72fef88315d317829a26"}, +] + +[package.dependencies] +certifi = "*" +fastapi = {version = ">=0.79.0", optional = true, markers = "extra == \"fastapi\""} +urllib3 = {version = ">=1.26.11", markers = "python_version 
>= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro (>=0.40b0,<1.0)", "opentelemetry-instrumentation-aiohttp-client (>=0.40b0,<1.0)", "opentelemetry-instrumentation-django (>=0.40b0,<1.0)", "opentelemetry-instrumentation-fastapi (>=0.40b0,<1.0)", "opentelemetry-instrumentation-flask (>=0.40b0,<1.0)", "opentelemetry-instrumentation-requests (>=0.40b0,<1.0)", "opentelemetry-instrumentation-sqlite3 (>=0.40b0,<1.0)", "opentelemetry-instrumentation-urllib (>=0.40b0,<1.0)"] +pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "setuptools" +version = "75.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.10\"" +files = [ + {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, + {file = "setuptools-75.3.0.tar.gz", hash = 
"sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.5.2) ; sys_platform != \"cygwin\""] +core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.12.*)", "pytest-mypy"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + 
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -3280,6 +4126,7 @@ version = "2.0.31" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"}, {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"}, @@ -3361,12 +4208,68 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] 
pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "sqlmodel" +version = "0.0.23" +description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sqlmodel-0.0.23-py3-none-any.whl", hash = "sha256:93810464b11810845920e0fedfabb31af2ccc2bb670637e9445da7ccee5f9f46"}, + {file = "sqlmodel-0.0.23.tar.gz", hash = "sha256:2b36be0d8b751c211864ec59c8c07c008078dbdeaf516f597eb61880a615197b"}, +] + +[package.dependencies] +pydantic = ">=1.10.13,<3.0.0" +SQLAlchemy = ">=2.0.14,<2.1.0" + +[[package]] +name = "starlette" +version = "0.44.0" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "starlette-0.44.0-py3-none-any.whl", hash = "sha256:19edeb75844c16dcd4f9dd72f22f9108c1539f3fc9c4c88885654fef64f85aea"}, + {file = "starlette-0.44.0.tar.gz", hash = "sha256:e35166950a3ccccc701962fe0711db0bc14f2ecd37c6f9fe5e3eae0cbaea8715"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "starlette" +version = "0.46.0" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "starlette-0.46.0-py3-none-any.whl", hash = "sha256:913f0798bd90ba90a9156383bcf1350a17d6259451d0d8ee27fc0cf2db609038"}, + {file = "starlette-0.46.0.tar.gz", hash = "sha256:b359e4567456b28d473d0193f34c0de0ed49710d75ef183a74a5ce0499324f50"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + [[package]] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -3381,6 +4284,7 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -3396,6 +4300,7 @@ version = "1.2.3" description = "A lil' TOML parser" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, @@ -3407,6 +4312,8 @@ version = "1.0.0" description = "List processing tools and functional utilities" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "implementation_name == \"cpython\" or implementation_name == \"pypy\"" files = [ {file = 
"toolz-1.0.0-py3-none-any.whl", hash = "sha256:292c8f1c4e7516bf9086f8850935c799a874039c8bcf959d47b600e4c44a6236"}, {file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"}, @@ -3418,6 +4325,7 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -3433,68 +4341,443 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "typer" +version = "0.15.2" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc"}, + {file = "typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5"}, +] + +[package.dependencies] +click = ">=8.0.0" +rich = ">=10.11.0" +shellingham = ">=1.3.0" +typing-extensions = ">=3.7.4.3" + +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "types-requests" +version = "2.32.0.20250301" +description = "Typing stubs for requests" +optional = false 
+python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "types_requests-2.32.0.20250301-py3-none-any.whl", hash = "sha256:0003e0124e2cbefefb88222ff822b48616af40c74df83350f599a650c8de483b"}, + {file = "types_requests-2.32.0.20250301.tar.gz", hash = "sha256:3d909dc4eaab159c0d964ebe8bfa326a7afb4578d8706408d417e17d61b0c500"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2025.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, +] + [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "uvicorn" +version = "0.33.0" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "uvicorn-0.33.0-py3-none-any.whl", hash = "sha256:2c30de4aeea83661a520abab179b24084a0019c0c1bbe137e5409f741cbde5f8"}, + {file = "uvicorn-0.33.0.tar.gz", hash = "sha256:3577119f82b7091cf4d3d4177bfda0bae4723ed92ab1439e8d779de880c9cc59"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvicorn" +version = "0.34.0" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, + {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.21.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = 
"uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + 
{file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, 
+ {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + 
+[[package]] +name = "watchfiles" +version = "0.24.0" +description = "Simple, modern and high performance file watching and code reload in python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = 
"sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = 
"watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = 
"sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file 
= "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, + {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, + {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, + {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, + {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = 
"sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, + {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = 
"watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "watchfiles" +version = "1.0.4" +description = "Simple, modern and high performance file watching and code reload in python." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"}, + {file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47eb32ef8c729dbc4f4273baece89398a4d4b5d21a1493efea77a17059f4df8a"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076f293100db3b0b634514aa0d294b941daa85fc777f9c698adb1009e5aca0b1"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eacd91daeb5158c598fe22d7ce66d60878b6294a86477a4715154990394c9b3"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13c2ce7b72026cfbca120d652f02c7750f33b4c9395d79c9790b27f014c8a5a2"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90192cdc15ab7254caa7765a98132a5a41471cf739513cc9bcf7d2ffcc0ec7b2"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278aaa395f405972e9f523bd786ed59dfb61e4b827856be46a42130605fd0899"}, + {file = "watchfiles-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a462490e75e466edbb9fc4cd679b62187153b3ba804868452ef0577ec958f5ff"}, + {file = "watchfiles-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d0630930f5cd5af929040e0778cf676a46775753e442a3f60511f2409f48f"}, + {file = "watchfiles-1.0.4-cp310-cp310-win32.whl", hash = "sha256:cc27a65069bcabac4552f34fd2dce923ce3fcde0721a16e4fb1b466d63ec831f"}, + {file = "watchfiles-1.0.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:8b1f135238e75d075359cf506b27bf3f4ca12029c47d3e769d8593a2024ce161"}, + {file = "watchfiles-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19"}, + {file = "watchfiles-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c"}, + {file = "watchfiles-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1"}, + {file = "watchfiles-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226"}, + {file = "watchfiles-1.0.4-cp311-cp311-win32.whl", hash = "sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105"}, + {file = "watchfiles-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74"}, + {file = 
"watchfiles-1.0.4-cp311-cp311-win_arm64.whl", hash = "sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3"}, + {file = "watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2"}, + {file = "watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a"}, + {file = "watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff"}, + {file = "watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e"}, + {file = "watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94"}, + {file = "watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = 
"sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c"}, + {file = "watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90"}, + {file = "watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9"}, + {file = "watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902"}, + {file = "watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1"}, + {file = "watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303"}, + {file = "watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80"}, + {file = 
"watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = "sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc"}, + {file = "watchfiles-1.0.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:d3452c1ec703aa1c61e15dfe9d482543e4145e7c45a6b8566978fbb044265a21"}, + {file = "watchfiles-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7b75fee5a16826cf5c46fe1c63116e4a156924d668c38b013e6276f2582230f0"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e997802d78cdb02623b5941830ab06f8860038faf344f0d288d325cc9c5d2ff"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0611d244ce94d83f5b9aff441ad196c6e21b55f77f3c47608dcf651efe54c4a"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9745a4210b59e218ce64c91deb599ae8775c8a9da4e95fb2ee6fe745fc87d01a"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4810ea2ae622add560f4aa50c92fef975e475f7ac4900ce5ff5547b2434642d8"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:740d103cd01458f22462dedeb5a3382b7f2c57d07ff033fbc9465919e5e1d0f3"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdbd912a61543a36aef85e34f212e5d2486e7c53ebfdb70d1e0b060cc50dd0bf"}, + {file = "watchfiles-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0bc80d91ddaf95f70258cf78c471246846c1986bcc5fd33ccc4a1a67fcb40f9a"}, + {file = "watchfiles-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab0311bb2ffcd9f74b6c9de2dda1612c13c84b996d032cd74799adb656af4e8b"}, + {file = "watchfiles-1.0.4-cp39-cp39-win32.whl", hash = "sha256:02a526ee5b5a09e8168314c905fc545c9bc46509896ed282aeb5a8ba9bd6ca27"}, + {file = "watchfiles-1.0.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:a5ae5706058b27c74bac987d615105da17724172d5aaacc6c362a40599b6de43"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdcc92daeae268de1acf5b7befcd6cfffd9a047098199056c72e4623f531de18"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8d3d9203705b5797f0af7e7e5baa17c8588030aaadb7f6a86107b7247303817"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdef5a1be32d0b07dcea3318a0be95d42c98ece24177820226b56276e06b63b0"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:342622287b5604ddf0ed2d085f3a589099c9ae8b7331df3ae9845571586c4f3d"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9fe37a2de80aa785d340f2980276b17ef697ab8db6019b07ee4fd28a8359d2f3"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d1ef56b56ed7e8f312c934436dea93bfa3e7368adfcf3df4c0da6d4de959a1e"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b42cac65beae3a362629950c444077d1b44f1790ea2772beaea95451c086bb"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42"}, + {file = "watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + [[package]] name = "web3" -version = "6.20.3" -description = "web3.py" +version = "7.8.0" +description = "web3: A Python library for interacting with Ethereum" optional = false -python-versions = ">=3.7.2" +python-versions = "<4,>=3.8" +groups = ["main"] files = [ - {file = "web3-6.20.3-py3-none-any.whl", hash = "sha256:529fbb33f2476ce8185f7a2ed7e2e07c4c28621b0e89b845fbfdcaea9571286d"}, - {file = "web3-6.20.3.tar.gz", 
hash = "sha256:c69dbf1a61ace172741d06990e60afc7f55f303eac087e7235f382df3047d017"}, + {file = "web3-7.8.0-py3-none-any.whl", hash = "sha256:c8771b3d8772f7104a0462804449beb57d36cef7bd8b411140f95a92fc46b559"}, + {file = "web3-7.8.0.tar.gz", hash = "sha256:712bc9fd6b1ef6e467ee24c25b581e1951cab2cba17f9f548f12587734f2c857"}, ] [package.dependencies] aiohttp = ">=3.7.4.post0" -ckzg = "<2" -eth-abi = ">=4.0.0" -eth-account = ">=0.8.0,<0.13" +eth-abi = ">=5.0.1" +eth-account = ">=0.13.1" eth-hash = {version = ">=0.5.1", extras = ["pycryptodome"]} -eth-typing = ">=3.0.0,<4.2.0 || >4.2.0,<5.0.0" -eth-utils = ">=2.1.0,<5" -hexbytes = ">=0.1.0,<0.4.0" -jsonschema = ">=4.0.0" -lru-dict = ">=1.1.6,<1.3.0" -protobuf = ">=4.21.6" +eth-typing = ">=5.0.0" +eth-utils = ">=5.0.0" +hexbytes = ">=1.2.0" +pydantic = ">=2.4.0" pyunormalize = ">=15.0.0" pywin32 = {version = ">=223", markers = "platform_system == \"Windows\""} -requests = ">=2.16.0" +requests = ">=2.23.0" +types-requests = ">=2.0.0" typing-extensions = ">=4.0.1" -websockets = ">=10.0.0" +websockets = ">=10.0.0,<14.0.0" [package.extras] -dev = ["build (>=0.9.0)", "bumpversion", "eth-tester[py-evm] (>=0.11.0b1,<0.12.0b1)", "eth-tester[py-evm] (>=0.9.0b1,<0.10.0b1)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "importlib-metadata (<5.0)", "ipfshttpclient (==0.8.0a2)", "pre-commit (>=2.21.0)", "py-geth (>=3.14.0,<4)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.21.2,<0.23)", "pytest-mock (>=1.10)", "pytest-watch (>=4.2)", "pytest-xdist (>=1.29)", "setuptools (>=38.6.0)", "sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=3.18.0)", "tqdm (>4.32)", "twine (>=1.13)", "when-changed (>=0.3.0)"] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] -ipfs = ["ipfshttpclient (==0.8.0a2)"] -tester = ["eth-tester[py-evm] (>=0.11.0b1,<0.12.0b1)", "eth-tester[py-evm] (>=0.9.0b1,<0.10.0b1)", "py-geth (>=3.14.0,<4)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", 
"eth-tester[py-evm] (>=0.12.0b1,<0.13.0b1)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "py-geth (>=5.1.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.18.1,<0.23)", "pytest-mock (>=1.10)", "pytest-xdist (>=2.4.0)", "setuptools (>=38.6.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "tqdm (>4.32)", "twine (>=1.13)", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=21,<22)"] +test = ["eth-tester[py-evm] (>=0.12.0b1,<0.13.0b1)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "py-geth (>=5.1.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.18.1,<0.23)", "pytest-mock (>=1.10)", "pytest-xdist (>=2.4.0)", "tox (>=4.0.0)"] +tester = ["eth-tester[py-evm] (>=0.12.0b1,<0.13.0b1)", "py-geth (>=5.1.0)"] [[package]] name = "websockets" @@ -3502,6 +4785,7 @@ version = "13.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, @@ -3597,6 +4881,7 @@ version = "3.0.3" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, @@ -3614,6 +4899,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -3702,6 +4988,8 @@ version = "1.15.2" description = "Yet another URL library" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.10\"" files = [ {file = "yarl-1.15.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e4ee8b8639070ff246ad3649294336b06db37a94bdea0d09ea491603e0be73b8"}, {file = "yarl-1.15.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7cf963a357c5f00cb55b1955df8bbe68d2f2f65de065160a1c26b85a1e44172"}, @@ -3808,26 +5096,126 @@ idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.0" +[[package]] +name = "yarl" +version = "1.18.3" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash 
= "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = 
"yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file 
= "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.0" + [[package]] name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.9\"" files = [ {file = "zipp-3.20.2-py3-none-any.whl", hash = 
"sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.8,<4" -content-hash = "2c5be8c990e6614f5c9c3cc0c87bbfba452e126701f6788bf1caeef5205775a2" +content-hash = "72a5e38b38f9e67f18eb0179a1546ec5bd7c9e8b6a3f0b3f8c5a638d00186900" diff --git a/pyproject.toml b/pyproject.toml index fd7ab2168..abf8cb71a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,19 +7,21 @@ name = "hemera" description = "Tools for exporting Ethereum blockchain data to JSON/CSV file and postgresql" version = "1.0.0a1" authors = [ - "xuzh ", "shanshuo0918 ", + "xuzh ", + "ideal93 ", ] readme = "README.md" license = "Apache-2.0" classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", - "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ] keywords = ["ethereum", "indexer", "explorer", 
"hemera"] packages = [ @@ -29,8 +31,9 @@ packages = [ [tool.poetry.dependencies] python = ">=3.8,<4" -web3 = "6.20.3" -eth-utils = ">=4.0.0" +web3 = "7.8.0" +eth-typing = ">=5.0.0" +eth-utils = "5.1.0" eth-abi = ">=5.0.1" python-dateutil = ">=2.8.0,<3" click = ">=8.0.4,<9" @@ -38,14 +41,6 @@ ethereum-dasm = "0.1.5" requests = "*" sqlalchemy = "2.0.31" psycopg2-binary = "2.9.9" -alembic = "1.13.3" -pandas = "1.5.3" -Flask = "3.0.3" -Flask-Caching = "2.3.0" -Flask-Cors = "3.0.9" -flask-limiter = "3.8.0" -flask-restx = "1.3.0" -Flask-SQLAlchemy = "3.1.1" blinker = ">=1.8.2" Werkzeug = "3.0.3" openpyxl = "3.1.3" @@ -53,25 +48,35 @@ redis = ">=4,<6" urllib3 = ">=2.2.2" dataclass-wizard = "0.22.3" pottery = "3.0.0" -eth_typing = ">=2.2.0,<5" orjson = "3.10.7" mpire = "2.10.2" dill = "0.3.9" multiprocess = "0.70.17" PyYAML = "6.0.2" -numpy = "1.24.4" +fastapi = {version = "<1.0.0,>=0.114.2", extras = ["standard"]} +pydantic = ">=2.4.0" +pydantic-settings = "<3.0.0,>=2.2.1" +sentry-sdk = {version = "<2.0.0,>=1.40.6", extras = ["fastapi"]} +sqlmodel = "*" +pyjwt = "<3.0.0,>=2.8.0" tenacity = "9.0.0" kafka-python = "2.0.2" prometheus_client = "0.21.1" sortedcontainers = "2.4.0" +deprecated = ">=1.2.13" +pandas = "1.5.3" +numpy = "1.24.4" +packaging = "*" [tool.poetry.group.dev.dependencies] pytest = ">=7.0.0" +pytest-postgresql = "*" black = "^21.5b0" isort = "^5.9.1" flake8 = "^3.9.2" ruff = "^0.0.235" pytest-cov = "*" +pygments = "<2.14.0" [tool.poetry.scripts] hemera = "hemera.cli:cli" @@ -83,7 +88,7 @@ hemera = "hemera.cli:cli" [tool.black] line-length = 120 -target-version = ["py38", "py39", "py310", "py311"] +target-version = ["py38", "py39", "py310", "py311", "py312", "py313"] [tool.isort] profile = "black" @@ -94,6 +99,7 @@ line_length = 120 [tool.pytest.ini_options] markers = [ + "api: marks tests as api tests", "serial: run this test in single thread", "indexer: Tests related to indexer", "indexer_exporter: Tests related to the indexer exporter", diff --git 
a/tests/hemera/app/__init__.py b/tests/hemera/app/__init__.py new file mode 100644 index 000000000..dd280572f --- /dev/null +++ b/tests/hemera/app/__init__.py @@ -0,0 +1,4 @@ +import pytest + +if __name__ == "__main__": + pytest.main(["-sv"]) diff --git a/tests/hemera/app/api/__init__.py b/tests/hemera/app/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/hemera/app/api/routes/__init__.py b/tests/hemera/app/api/routes/__init__.py new file mode 100644 index 000000000..c6d4031a7 --- /dev/null +++ b/tests/hemera/app/api/routes/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/19 18:22 +# @Author will +# @File __init__.py.py +# @Brief diff --git a/tests/hemera/app/api/routes/conftest.py b/tests/hemera/app/api/routes/conftest.py new file mode 100644 index 000000000..24128485b --- /dev/null +++ b/tests/hemera/app/api/routes/conftest.py @@ -0,0 +1,72 @@ +import os + +import pytest +from fastapi.testclient import TestClient +from pytest_postgresql import factories +from sqlalchemy import create_engine +from sqlmodel import Session, SQLModel, delete + +from hemera.app.main import app +from hemera.common.models.address.address_internal_transaciton import AddressInternalTransactions +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.token.tokens import Tokens +from hemera.common.models.trace.contracts import Contracts +from hemera.common.models.trace.traces import ContractInternalTransactions + +postgresql_proc = factories.postgresql_proc() + +postgresql = factories.postgresql("postgresql_proc") + + +@pytest.fixture(scope="function") +def engine(postgresql): + """Create test database engine.""" + db_url = ( + f"postgresql://{postgresql.info.user}@{postgresql.info.host}:{postgresql.info.port}/{postgresql.info.dbname}" + ) + os.environ["POSTGRES_URL"] = db_url + + engine = create_engine(db_url, pool_size=20, 
max_overflow=20, pool_timeout=30, pool_recycle=3600) + + SQLModel.metadata.create_all(engine) + yield engine + SQLModel.metadata.drop_all(engine) + engine.dispose() + + +@pytest.fixture(scope="function") +def session(engine): + """Create a test session.""" + with Session(engine) as session: + yield session + session.rollback() + + +@pytest.fixture(autouse=True) +def clean_db(session): + """Clean database before each test""" + # Order matters due to foreign key constraints + session.exec(delete(Transactions)) + session.exec(delete(Contracts)) + session.exec(delete(Tokens)) + session.exec(delete(Blocks)) + session.exec(delete(ContractInternalTransactions)) + session.exec(delete(AddressInternalTransactions)) + session.commit() + + +@pytest.fixture(scope="session", autouse=True) +def cleanup_postgresql(postgresql_proc): + """Ensure proper cleanup of PostgreSQL process after all tests""" + try: + yield postgresql_proc + finally: + postgresql_proc.stop() + + +@pytest.mark.serial +@pytest.fixture +def client(engine): + with TestClient(app) as test_client: + yield test_client diff --git a/tests/hemera/app/api/routes/developer/__init__.py b/tests/hemera/app/api/routes/developer/__init__.py new file mode 100644 index 000000000..575e8b679 --- /dev/null +++ b/tests/hemera/app/api/routes/developer/__init__.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/2 14:57 +# @Author ideal93 +# @File __init__.py.py +# @Brief + +import pytest + +if __name__ == "__main__": + pytest.main(["-sv"]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/__init__.py b/tests/hemera/app/api/routes/developer/es_adapter/__init__.py new file mode 100644 index 000000000..8f8047d70 --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/__init__.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/2 14:58 +# @Author ideal93 +# @File __init__.py.py +# @Brief +import pytest + +if __name__ == "__main__": + 
pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_account_balance.py b/tests/hemera/app/api/routes/developer/es_adapter/test_account_balance.py new file mode 100644 index 000000000..5030b27ed --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_account_balance.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/2 14:59 +# @Author ideal93 +# @File test_account_balance.py +# @Brief +from datetime import datetime, timedelta + +import pytest +from fastapi.testclient import TestClient +from sqlalchemy import delete + +from hemera.app.api.routes.developer.es_adapter.helper import ( + account_balance, + account_balancehistory, + account_balancemulti, +) +from hemera.app.main import app +from hemera.common.models.trace.address_coin_balances import AddressCoinBalances +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session): + """Clean database before each test""" + # Order matters due to foreign key constraints + session.exec(delete(AddressCoinBalances)) + + session.commit() + + +@pytest.mark.serial +@pytest.fixture +def client(engine): + with TestClient(app) as test_client: + yield test_client + + +@pytest.mark.serial +@pytest.fixture +def sample_coin_balance(clean_db, session): + """Create a set of test blocks""" + coin_balances = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create 10 consecutive blocks + for i in range(5): + block = AddressCoinBalances( + address=hex_str_to_bytes(f"0x{i:040x}"), + balance=1000000 * (i + 1), + block_number=1000 + i, + block_timestamp=base_time + timedelta(minutes=i), + ) + coin_balances.append(block) + session.add(block) + + session.commit() + return coin_balances + + +@pytest.mark.serial +@pytest.mark.es_api +def test_account_balance(client, sample_coin_balance, session): + """Test successful health check with single block""" + + balance = account_balance(session, 
f"0x{1:040x}") + assert balance == 2000000 + + balance = account_balance(session, f"0x{9:040x}") + assert balance is None + + +@pytest.mark.serial +@pytest.mark.es_api +def test_account_balancemulti(client, sample_coin_balance, session): + """Test fetching balance for multiple addresses""" + addresses = [f"0x{0:040x}", f"0x{1:040x}", f"0x{3:040x}"] + balances = account_balancemulti(session, addresses=addresses, tag=None) + + assert len(balances) == 3 + assert balances[0].balance == 1000000 # address 0 + assert balances[1].balance == 2000000 # address 1 + assert balances[2].balance == 4000000 # address 3 + + +@pytest.mark.serial +@pytest.mark.es_api +def test_account_balancehistory(client, sample_coin_balance, session): + """Test fetching historical balance at a specific block number""" + balance = account_balancehistory(session, f"0x{2:040x}", 1002) + assert balance == 3000000 + + balance = account_balancehistory(session, f"0x{2:040x}", 999) + assert balance == 0 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_account_token_holder.py b/tests/hemera/app/api/routes/developer/es_adapter/test_account_token_holder.py new file mode 100644 index 000000000..d42508aea --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_account_token_holder.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/4 14:09 +# @Author ideal93 +# @File test_account_token_holder.py +# @Brief + +from datetime import datetime + +import pytest +from sqlalchemy import delete +from sqlmodel import Session + +from hemera.app.api.routes.developer.es_adapter.helper import account_address_token_holding +from hemera.common.enumeration.token_type import TokenType +from hemera.common.models.token.token_balances import CurrentTokenBalances +from hemera.common.models.token.token_id_balances import CurrentTokenIdBalances +from hemera.common.models.token.tokens import Tokens +from 
hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session: Session): + session.exec(delete(Tokens)) + session.exec(delete(CurrentTokenBalances)) + session.commit() + + +@pytest.fixture +def sample_token_data(session: Session): + """Create sample token data for testing.""" + base_time = datetime(2025, 1, 1, 12, 0, 0) + + # Create token records (ERC20, ERC721, ERC1155) + tokens = [ + Tokens( + address=hex_str_to_bytes("0x" + "A" * 40), + name="Sample ERC20", + symbol="SERC20", + decimals=18, + token_type="ERC20", + ), + Tokens( + address=hex_str_to_bytes("0x" + "B" * 40), + name="Sample ERC721", + symbol="SERC721", + decimals=0, + token_type="ERC721", + ), + Tokens( + address=hex_str_to_bytes("0x" + "C" * 40), + name="Sample ERC1155", + symbol="SERC1155", + decimals=0, + token_type="ERC1155", + ), + ] + session.add_all(tokens) + session.commit() + + # Create current token balances + balances = [ + CurrentTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "A" * 40), + balance=1000, + token_type="ERC20", + token_id=-1, + block_number=1000, + block_timestamp=base_time, + ), + CurrentTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "B" * 40), + balance=5, + token_type="ERC721", + token_id=-1, + block_number=1000, + block_timestamp=base_time, + ), + CurrentTokenIdBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "C" * 40), + balance=10, + token_type="ERC1155", + token_id=2, + block_number=1000, + block_timestamp=base_time, + ), + CurrentTokenBalances( + address=hex_str_to_bytes("0x" + "2" * 40), + token_address=hex_str_to_bytes("0x" + "A" * 40), + balance=500, + block_number=1000, + ), + ] + session.add_all(balances) + session.commit() + + return { + "erc20_contract": "0x" + "a" * 40, + "erc721_contract": "0x" + "b" * 40, + "erc1155_contract": "0x" + "c" * 40, + "address_1": 
"0x" + "1" * 40, + "address_2": "0x" + "2" * 40, + } + + +def test_account_address_token_holding_erc20(session: Session, sample_token_data): + """Test fetching ERC20 token holdings for an address.""" + result = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=10, token_type=TokenType.ERC20 + ) + + assert len(result) == 1 + assert result[0].TokenAddress == sample_token_data["erc20_contract"].lower() + assert result[0].TokenName == "Sample ERC20" + assert result[0].TokenSymbol == "SERC20" + assert result[0].TokenQuantity == "1000" + assert result[0].TokenType == "ERC20" + assert result[0].TokenDecimals == "18" + assert result[0].TokenID is None + + +def test_account_address_token_holding_erc721(session: Session, sample_token_data): + """Test fetching ERC721 token holdings for an address.""" + result = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=10, token_type=TokenType.ERC721 + ) + + assert len(result) == 1 + assert result[0].TokenAddress == sample_token_data["erc721_contract"] + assert result[0].TokenName == "Sample ERC721" + assert result[0].TokenSymbol == "SERC721" + assert result[0].TokenQuantity == "5" + assert result[0].TokenType == "ERC721" + assert result[0].TokenDecimals is None + assert result[0].TokenID == None + + +def test_account_address_token_holding_erc1155(session: Session, sample_token_data): + """Test fetching ERC1155 token holdings for an address.""" + result = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=10, token_type=TokenType.ERC1155 + ) + + assert len(result) == 1 + assert result[0].TokenAddress == sample_token_data["erc1155_contract"] + assert result[0].TokenName == "Sample ERC1155" + assert result[0].TokenSymbol == "SERC1155" + assert result[0].TokenQuantity == "10" + assert result[0].TokenType == "ERC1155" + assert result[0].TokenDecimals is None + assert 
result[0].TokenID == "2" + + +def test_account_address_token_holding_multiple_tokens(session: Session, sample_token_data): + """Test fetching multiple token types for an address.""" + result = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=10, token_type=TokenType.ERC20 + ) + assert len(result) == 1 # Only one ERC20 token for address_1 + assert result[0].TokenAddress == sample_token_data["erc20_contract"] + + result = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=10, token_type=TokenType.ERC721 + ) + assert len(result) == 1 # Only one ERC721 token for address_1 + assert result[0].TokenAddress == sample_token_data["erc721_contract"] + + result = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=10, token_type=TokenType.ERC1155 + ) + assert len(result) == 1 # Only one ERC1155 token for address_1 + assert result[0].TokenAddress == sample_token_data["erc1155_contract"] + + +def test_account_address_token_holding_pagination(session: Session, sample_token_data): + """Test pagination when fetching token holdings for an address.""" + result_page1 = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=1, offset=2, token_type=TokenType.ERC20 + ) + result_page2 = account_address_token_holding( + session=session, address=sample_token_data["address_1"], page=2, offset=2, token_type=TokenType.ERC20 + ) + + assert len(result_page1) == 1 # ERC20 token, so 1 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_account_token_transfers.py b/tests/hemera/app/api/routes/developer/es_adapter/test_account_token_transfers.py new file mode 100644 index 000000000..4a573b9cc --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_account_token_transfers.py @@ -0,0 +1,433 @@ 
+#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/3 02:55 +# @Author ideal93 +# @File test_account_token_transfers.py.py +# @Brief + +from datetime import datetime, timedelta +from typing import List + +import pytest +from sqlalchemy import delete +from sqlmodel import Session + +from hemera.app.api.routes.developer.es_adapter.helper import ( + ERC20Transfer, + ERC721Transfer, + ERC1155Transfer, + get_account_token_transfers, +) +from hemera.common.enumeration.token_type import TokenType +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.token.token_transfers import ERC20TokenTransfers, ERC721TokenTransfers, ERC1155TokenTransfers +from hemera.common.models.token.tokens import Tokens +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session: Session): + """ + Clean the database before each test by deleting from transfers, tokens, and transactions. + """ + # Delete from token transfer tables + session.exec(delete(ERC20TokenTransfers)) + session.exec(delete(ERC721TokenTransfers)) + session.exec(delete(ERC1155TokenTransfers)) + # Delete from tokens and transactions tables + session.exec(delete(Tokens)) + session.exec(delete(Transactions)) + session.commit() + + +@pytest.fixture +def sample_erc20_data(session: Session): + """ + Create sample ERC20 token transfers, tokens, and transactions. + """ + # Use fixed addresses for testing + sample_token_address = "0x" + "a" * 40 + sample_account_address = "0x" + "b" * 40 + base_time = datetime(2025, 1, 1, 12, 0, 0) + + # Create a token record for the ERC20 token + token = Tokens(address=hex_str_to_bytes(sample_token_address), name="SampleERC20", symbol="SERC20", decimals=18) + session.add(token) + session.commit() + + # Create 5 transactions and corresponding ERC20 transfer records. 
+ for i in range(5): + tx_hash = hex_str_to_bytes(f"0x{i:064x}") + block_number = 1000 + i + tx = Transactions( + hash=tx_hash, + block_number=block_number, + block_timestamp=base_time + timedelta(minutes=i), + nonce=i, + block_hash=hex_str_to_bytes(f"0x{i:064x}"), + input=None, + transaction_index=i, + from_address=hex_str_to_bytes(sample_account_address) if i % 2 == 0 else hex_str_to_bytes("0x" + "c" * 40), + to_address=hex_str_to_bytes("0x" + "c" * 40) if i % 2 == 0 else hex_str_to_bytes(sample_account_address), + value=1000000 * (i + 1), + gas=21000, + gas_price=20000000000, + receipt_status=1, + receipt_contract_address=None, + receipt_cumulative_gas_used=21000 * (i + 1), + receipt_gas_used=21000, + ) + session.add(tx) + # Create corresponding ERC20 transfer record + transfer = ERC20TokenTransfers( + block_number=block_number, + log_index=i, + block_timestamp=tx.block_timestamp, + transaction_hash=tx_hash, + block_hash=tx.block_hash, + from_address=tx.from_address, + to_address=tx.to_address, + token_address=hex_str_to_bytes(sample_token_address), + value=1000 * (i + 1), # some arbitrary value + ) + session.add(transfer) + session.commit() + return { + "token_address": sample_token_address, + "account_address": sample_account_address, + "base_time": base_time, + } + + +@pytest.fixture +def sample_erc721_data(session: Session): + """ + Create sample ERC721 token transfers, tokens, and transactions. + """ + sample_token_address = "0x" + "d" * 40 + sample_account_address = "0x" + "e" * 40 + base_time = datetime(2025, 1, 2, 12, 0, 0) + + # Create a token record for the ERC721 token + token = Tokens( + address=hex_str_to_bytes(sample_token_address), + name="SampleERC721", + symbol="SERC721", + decimals=0, # not used for ERC721 transfers + ) + session.add(token) + session.commit() + + # Create 3 transactions and corresponding ERC721 transfer records. 
+ for i in range(3): + tx_hash = hex_str_to_bytes(f"0x{(10+i):064x}") + block_number = 2000 + i + tx = Transactions( + hash=tx_hash, + block_number=block_number, + block_timestamp=base_time + timedelta(minutes=i), + nonce=i, + block_hash=hex_str_to_bytes(f"0x{(10+i):064x}"), + input=None, + transaction_index=i, + from_address=hex_str_to_bytes(sample_account_address) if i % 2 == 0 else hex_str_to_bytes("0x" + "f" * 40), + to_address=hex_str_to_bytes("0x" + "f" * 40) if i % 2 == 0 else hex_str_to_bytes(sample_account_address), + value=0, # ERC721 transfers typically do not use a value field + gas=30000, + gas_price=25000000000, + receipt_status=1, + receipt_contract_address=None, + receipt_cumulative_gas_used=30000 * (i + 1), + receipt_gas_used=30000, + ) + session.add(tx) + # Create corresponding ERC721 transfer record with token_id + transfer = ERC721TokenTransfers( + block_number=block_number, + block_timestamp=tx.block_timestamp, + transaction_hash=tx_hash, + log_index=i, + block_hash=tx.block_hash, + from_address=tx.from_address, + to_address=tx.to_address, + token_address=hex_str_to_bytes(sample_token_address), + token_id=i, # simple token_id + ) + session.add(transfer) + session.commit() + return { + "token_address": sample_token_address, + "account_address": sample_account_address, + "base_time": base_time, + } + + +@pytest.fixture +def sample_erc1155_data(session: Session): + """ + Create sample ERC1155 token transfers, tokens, and transactions. + """ + sample_token_address = "0x" + "1" * 40 + sample_account_address = "0x" + "2" * 40 + base_time = datetime(2025, 1, 3, 12, 0, 0) + + # Create a token record for the ERC1155 token + token = Tokens( + address=hex_str_to_bytes(sample_token_address), + name="SampleERC1155", + symbol="SERC1155", + decimals=0, # decimals not applicable for ERC1155 transfers + ) + session.add(token) + session.commit() + + # Create 4 transactions and corresponding ERC1155 transfer records. 
+ for i in range(4): + tx_hash = hex_str_to_bytes(f"0x{(20+i):064x}") + block_number = 3000 + i + tx = Transactions( + hash=tx_hash, + block_number=block_number, + block_timestamp=base_time + timedelta(minutes=i), + nonce=i, + block_hash=hex_str_to_bytes(f"0x{(20+i):064x}"), + input=None, + transaction_index=i, + from_address=hex_str_to_bytes(sample_account_address) if i % 2 == 0 else hex_str_to_bytes("0x" + "3" * 40), + to_address=hex_str_to_bytes("0x" + "3" * 40) if i % 2 == 0 else hex_str_to_bytes(sample_account_address), + value=0, + gas=25000, + gas_price=22000000000, + receipt_status=1, + receipt_contract_address=None, + receipt_cumulative_gas_used=25000 * (i + 1), + receipt_gas_used=25000, + ) + session.add(tx) + # Create corresponding ERC1155 transfer record with token_id and value + transfer = ERC1155TokenTransfers( + block_number=block_number, + block_timestamp=tx.block_timestamp, + transaction_hash=tx_hash, + log_index=i, + block_hash=tx.block_hash, + from_address=tx.from_address, + to_address=tx.to_address, + token_address=hex_str_to_bytes(sample_token_address), + token_id=i, # token id for ERC1155 + value=500 * (i + 1), # arbitrary value transferred + ) + session.add(transfer) + session.commit() + return { + "token_address": sample_token_address, + "account_address": sample_account_address, + "base_time": base_time, + } + + +def test_returns_empty_when_no_address_or_contract(session: Session): + """ + Test that the function returns an empty list if both address and contract_address are None. + """ + results = get_account_token_transfers( + session=session, + contract_address=None, + address=None, + page=1, + offset=10, + sort_order="desc", + start_block=0, + end_block=10000, + token_type=TokenType.ERC20, + ) + assert results == [] + + +def test_filters_by_address_erc20(session: Session, sample_erc20_data): + """ + Test filtering ERC20 transfers by account address. + """ + # Use the sample account address that appears in some transfers. 
+ account_address = sample_erc20_data["account_address"] + results: List[ERC20Transfer] = get_account_token_transfers( + session=session, + contract_address=None, + address=account_address, + page=1, + offset=10, + sort_order="asc", # ascending order so the lowest block_number comes first + start_block=1000, + end_block=1010, + token_type=TokenType.ERC20, + ) + + # From our fixture, out of 5 transfers, 3 should have the account address (either in from or to) + # depending on how we alternated the addresses. + assert len(results) >= 1 + # Check that every returned transfer has the account address in either from or to fields. + for transfer in results: + from_addr = transfer.from_address + to_addr = transfer.to + expected = account_address.lower() + # Convert to lower-case hex strings for comparison. + assert expected in (from_addr.lower(), to_addr.lower()) + # Also check that pagination and sort order work correctly. + block_numbers = [int(t.block_number) for t in results] + assert block_numbers == sorted(block_numbers) + + +def test_filters_by_contract_address_erc20(session: Session, sample_erc20_data): + """ + Test filtering ERC20 transfers by contract (token) address. + """ + token_address = sample_erc20_data["token_address"] + results: List[ERC20Transfer] = get_account_token_transfers( + session=session, + contract_address=token_address, + address=None, + page=1, + offset=10, + sort_order="desc", + start_block=1000, + end_block=1010, + token_type=TokenType.ERC20, + ) + # We expect to get all 5 transfers for this token + assert len(results) == 5 + for transfer in results: + assert transfer.contract_address.lower() == token_address.lower() + + +def test_pagination_erc20(session: Session, sample_erc20_data): + """ + Test that pagination returns the correct subset of ERC20 transfers. + """ + # Assume our fixture inserted 5 transfers. Set offset=2 so we need 3 pages. + # Page 1: transfers 0-1; Page 2: transfers 2-3; Page 3: transfer 4. 
+ token_address = sample_erc20_data["token_address"] + # Page 1 (descending order) + results_page1 = get_account_token_transfers( + session=session, + contract_address=token_address, + address=None, + page=1, + offset=2, + sort_order="desc", + start_block=1000, + end_block=1010, + token_type=TokenType.ERC20, + ) + assert len(results_page1) == 2 + # Page 3 + results_page3 = get_account_token_transfers( + session=session, + contract_address=token_address, + address=None, + page=3, + offset=2, + sort_order="desc", + start_block=1000, + end_block=1010, + token_type=TokenType.ERC20, + ) + # Page 3 should have the remaining 1 record + assert len(results_page3) == 1 + + +def test_sort_order_erc20(session: Session, sample_erc20_data): + """ + Test that sort order is applied correctly for ERC20 transfers. + """ + token_address = sample_erc20_data["token_address"] + + # Descending order: highest block number first + results_desc = get_account_token_transfers( + session=session, + contract_address=token_address, + address=None, + page=1, + offset=10, + sort_order="desc", + start_block=1000, + end_block=1010, + token_type=TokenType.ERC20, + ) + block_numbers_desc = [int(t.block_number) for t in results_desc] + assert block_numbers_desc == sorted(block_numbers_desc, reverse=True) + + # Ascending order: lowest block number first + results_asc = get_account_token_transfers( + session=session, + contract_address=token_address, + address=None, + page=1, + offset=10, + sort_order="asc", + start_block=1000, + end_block=1010, + token_type=TokenType.ERC20, + ) + block_numbers_asc = [int(t.block_number) for t in results_asc] + assert block_numbers_asc == sorted(block_numbers_asc) + + +def test_token_type_specific_fields_erc721(session: Session, sample_erc721_data): + """ + Test that token type specific fields (like tokenID) are returned correctly for ERC721. 
+ """ + account_address = sample_erc721_data["account_address"] + token_address = sample_erc721_data["token_address"] + + results: List[ERC721Transfer] = get_account_token_transfers( + session=session, + contract_address=token_address, + address=account_address, + page=1, + offset=10, + sort_order="asc", + start_block=2000, + end_block=2020, + token_type=TokenType.ERC721, + ) + # There are 3 transfers in our fixture. Each transfer should have a tokenID field. + assert len(results) <= 3 + for transfer in results: + # tokenID was set to i in our fixture (0, 1, 2) + assert hasattr(transfer, "token_id") + # Also check that tokenName and tokenSymbol are set correctly. + assert transfer.token_name == "SampleERC721" + assert transfer.token_symbol == "SERC721" + + +def test_token_type_specific_fields_erc1155(session: Session, sample_erc1155_data): + """ + Test that token type specific fields (like tokenValue and tokenID) are returned correctly for ERC1155. + """ + account_address = sample_erc1155_data["account_address"] + token_address = sample_erc1155_data["token_address"] + + results: List[ERC1155Transfer] = get_account_token_transfers( + session=session, + contract_address=token_address, + address=account_address, + page=1, + offset=10, + sort_order="asc", + start_block=3000, + end_block=3020, + token_type=TokenType.ERC1155, + ) + # There are 4 transfers in our fixture. + assert len(results) <= 4 + for transfer in results: + # tokenID and tokenValue should be available for ERC1155 transfers. + assert hasattr(transfer, "token_id") + assert hasattr(transfer, "token_value") + # Also check that tokenName and tokenSymbol are set correctly. 
+ assert transfer.token_name == "SampleERC1155" + assert transfer.token_symbol == "SERC1155" + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_account_transactions.py b/tests/hemera/app/api/routes/developer/es_adapter/test_account_transactions.py new file mode 100644 index 000000000..bd36fa251 --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_account_transactions.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/2 15:41 +# @Author ideal93 +# @File test_account_transactions.py +# @Brief + +from datetime import datetime, timedelta + +import pytest +from sqlmodel import delete + +from hemera.app.api.routes.developer.es_adapter.helper import account_txlist, account_txlistinternal +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.trace.traces import ContractInternalTransactions +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session): + """Clean database before each test""" + session.exec(delete(ContractInternalTransactions)) + session.exec(delete(Transactions)) + session.commit() + + +@pytest.mark.serial +@pytest.fixture +def sample_transactions(clean_db, session): + """Create a set of test transactions""" + transactions = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create 5 test transactions + for i in range(5): + tx = Transactions( + hash=hex_str_to_bytes(f"0x{i:064x}"), + block_number=1000 + i, + block_timestamp=base_time + timedelta(minutes=i), + nonce=i, + block_hash=hex_str_to_bytes(f"0x{i:064x}"), + input=None, + transaction_index=i, + from_address=hex_str_to_bytes(f"0x{1:040x}"), + to_address=hex_str_to_bytes(f"0x{2:040x}"), + value=1000000 * (i + 1), + gas=21000, + gas_price=20000000000, + receipt_status=1, + receipt_contract_address=None, + receipt_cumulative_gas_used=21000 * (i + 1), + 
receipt_gas_used=21000, + ) + transactions.append(tx) + session.add(tx) + + session.commit() + return transactions + + +@pytest.mark.serial +@pytest.fixture +def sample_internal_transactions(clean_db, session): + """Create a set of test internal transactions""" + internal_txs = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create 5 test internal transactions + for i in range(5): + tx = ContractInternalTransactions( + transaction_hash=hex_str_to_bytes(f"0x{i:064x}"), + block_number=1000 + i, + block_timestamp=base_time + timedelta(minutes=i), + from_address=hex_str_to_bytes(f"0x{1:040x}"), + to_address=hex_str_to_bytes(f"0x{2:040x}"), + value=1000000 * (i + 1), + trace_type="call", + gas=21000, + gas_used=21000, + trace_id=str(i), + error=0, + ) + internal_txs.append(tx) + session.add(tx) + + session.commit() + return internal_txs + + +@pytest.mark.serial +@pytest.mark.es_api +def test_account_txlist(client, sample_transactions, session): + """Test fetching normal transactions""" + # Test by address + txs = account_txlist( + session, + txhash=None, + address=f"0x{1:040x}", # from_address in sample data + start_block=1000, + end_block=1004, + page=1, + offset=10, + sort_order="desc", + ) + + assert len(txs) == 5 + assert txs[0].blockNumber == "1004" + assert txs[0].value == "5000000" + + # Test by transaction hash + tx = account_txlist( + session, + txhash=f"0x{0:064x}", + address=None, + start_block=1000, + end_block=1004, + page=1, + offset=10, + sort_order="asc", + ) + + assert len(tx) == 1 + assert tx[0].blockNumber == "1000" + assert tx[0].value == "1000000" + + +@pytest.mark.serial +@pytest.mark.es_api +def test_account_txlistinternal(client, sample_internal_transactions, session): + """Test fetching internal transactions""" + # Test by address + txs = account_txlistinternal( + session, + txhash=None, + address=f"0x{1:040x}", # from_address in sample data + start_block=1000, + end_block=1004, + page=1, + offset=10, + sort_order="desc", + ) + + assert 
len(txs) == 5 + assert txs[0].blockNumber == "1004" + assert txs[0].value == "5000000" + + # Test by transaction hash + tx = account_txlistinternal( + session, + txhash=f"0x{0:064x}", + address=None, + start_block=1000, + end_block=1004, + page=1, + offset=10, + sort_order="asc", + ) + + assert len(tx) == 1 + assert tx[0].blockNumber == "1000" + assert tx[0].value == "1000000" + + tx = account_txlistinternal( + session, + txhash=f"0x{0:064x}", + address=None, + start_block=1000, + end_block=1004, + page=1, + offset=10, + sort_order="desc", + ) + assert len(tx) == 1 + assert tx[0].blockNumber == "1000" + assert tx[0].value == "1000000" + assert tx[0].fromAddress == f"0x{1:040x}" + assert tx[0].toAddress == f"0x{2:040x}" + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_blocks.py b/tests/hemera/app/api/routes/developer/es_adapter/test_blocks.py new file mode 100644 index 000000000..ee1e748d4 --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_blocks.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/4 12:37 +# @Author ideal93 +# @File test_blocks.py +# @Brief +from datetime import datetime, timedelta + +import pytest +from sqlmodel import Session, delete + +from hemera.app.api.routes.developer.es_adapter.helper import block_number_by_timestamp +from hemera.common.models.base.blocks import Blocks +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_blocks(session: Session): + session.exec(delete(Blocks)) + session.commit() + + +@pytest.fixture +def sample_blocks(session: Session): + base_time = datetime(2025, 1, 1, 12, 0, 0) + blocks = [ + Blocks(number=100, timestamp=base_time, hash=hex_str_to_bytes(f"0x{'%064x' % 100}")), + Blocks(number=101, timestamp=base_time + timedelta(minutes=1), hash=hex_str_to_bytes(f"0x{'%064x' % 101}")), + Blocks(number=102, timestamp=base_time + 
timedelta(minutes=2), hash=hex_str_to_bytes(f"0x{'%064x' % 102}")), + Blocks(number=103, timestamp=base_time + timedelta(minutes=3), hash=hex_str_to_bytes(f"0x{'%064x' % 103}")), + Blocks(number=104, timestamp=base_time + timedelta(minutes=4), hash=hex_str_to_bytes(f"0x{'%064x' % 104}")), + ] + session.add_all(blocks) + session.commit() + return blocks + + +def test_block_number_before(session: Session, sample_blocks): + """ + Test the "before" option: + Given a timestamp, the function should return the block with the greatest timestamp + that is less than or equal to the given timestamp. + """ + # Use a timestamp that falls between block 101 and 102. + base_time = datetime(2025, 1, 1, 12, 0, 0) + # Timestamp for 1 minute and 30 seconds after base_time. + test_timestamp = int((base_time + timedelta(minutes=1, seconds=30)).timestamp()) + test_timestamp = test_timestamp + # Expect block number 101 because: + # Block 100: base_time (0 minutes) + # Block 101: base_time + 1 minute + # Block 102: base_time + 2 minutes (too high) + result = block_number_by_timestamp(session, test_timestamp, "before") + assert result == 101, f"Expected block number 101 but got {result}" + + +def test_block_number_after(session: Session, sample_blocks): + """ + Test the "after" option: + Given a timestamp, the function should return the block with the smallest timestamp + that is greater than or equal to the given timestamp. + """ + base_time = datetime(2025, 1, 1, 12, 0, 0) + # Timestamp for 1 minute and 30 seconds after base_time. + test_timestamp = (base_time + timedelta(minutes=1, seconds=30)).timestamp() + + # Expect block number 102 because: + # Block 102 has timestamp base_time + 2 minutes, which is the smallest timestamp >= test_timestamp. 
+ result = block_number_by_timestamp(session, test_timestamp, "after") + assert result == 102, f"Expected block number 102 but got {result}" + + +def test_block_number_by_timestamp_exact_match(session: Session, sample_blocks): + """ + Test the function when the timestamp exactly matches one of the block timestamps. + """ + base_time = datetime(2025, 1, 1, 12, 0, 0) + exact_timestamp = base_time.timestamp() # exactly block 100's timestamp + + # For "before" and "after", an exact match should return that block number. + result_before = block_number_by_timestamp(session, exact_timestamp, "before") + result_after = block_number_by_timestamp(session, exact_timestamp, "after") + assert result_before == 100, f"Expected block number 100 but got {result_before}" + assert result_after == 100, f"Expected block number 100 but got {result_after}" + + +def test_block_number_invalid_closest(session: Session, sample_blocks): + """ + Test that an invalid value for closest returns None. + """ + base_time = datetime(2025, 1, 1, 12, 0, 0) + test_timestamp = base_time.timestamp() + + result = block_number_by_timestamp(session, test_timestamp, "invalid") + assert result is None, "Expected None for an invalid closest parameter" + + +def test_block_number_no_block_before(session: Session, sample_blocks): + """ + Test the scenario where no block exists before the given timestamp. + For example, if the timestamp is earlier than the earliest block timestamp. + """ + # Set a timestamp earlier than the first block's timestamp. + base_time = datetime(2025, 1, 1, 12, 0, 0) + test_timestamp = (base_time - timedelta(seconds=1)).timestamp() + + result = block_number_by_timestamp(session, test_timestamp, "before") + # No block exists before this timestamp so result should be None. + assert result is None, f"Expected None but got {result}" + + +def test_block_number_no_block_after(session: Session, sample_blocks): + """ + Test the scenario where no block exists after the given timestamp. 
+ For example, if the timestamp is later than the latest block timestamp. + """ + base_time = datetime(2025, 1, 1, 12, 0, 0) + # Set a timestamp later than the last block's timestamp. + test_timestamp = (base_time + timedelta(minutes=5, seconds=1)).timestamp() + + result = block_number_by_timestamp(session, test_timestamp, "after") + # No block exists after this timestamp so result should be None. + assert result is None, f"Expected None but got {result}" + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_contracts.py b/tests/hemera/app/api/routes/developer/es_adapter/test_contracts.py new file mode 100644 index 000000000..2b8e5536a --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_contracts.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/4 12:37 +# @Author ideal93 +# @File test_blocks.py +# @Brief + +import pytest +from sqlmodel import Session, delete + +from hemera.app.api.routes.developer.es_adapter.helper import get_contract_creator_and_creation_tx_hash +from hemera.common.models.trace.contracts import Contracts +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session: Session): + session.exec(delete(Contracts)) + session.commit() + + +@pytest.fixture +def sample_contract_data(session: Session): + """Create sample contract data for testing.""" + contracts = [ + Contracts( + address=hex_str_to_bytes("0x" + "A" * 40), + contract_creator=hex_str_to_bytes("0x" + "B" * 40), + transaction_hash=hex_str_to_bytes("0x" + "C" * 64), + ), + Contracts( + address=hex_str_to_bytes("0x" + "D" * 40), + contract_creator=hex_str_to_bytes("0x" + "E" * 40), + transaction_hash=hex_str_to_bytes("0x" + "F" * 64), + ), + ] + session.add_all(contracts) + session.commit() + + return { + "contract_1": "0x" + "a" * 40, + "contract_2": "0x" + "d" * 40, + } + + +def 
test_get_contract_creator_and_creation_tx_hash(session: Session, sample_contract_data): + """Test retrieving contract creator and transaction hash.""" + result = get_contract_creator_and_creation_tx_hash( + session=session, contract_addresses=[sample_contract_data["contract_1"], sample_contract_data["contract_2"]] + ) + + assert len(result) == 2 + assert result[0].contractAddress == sample_contract_data["contract_1"] + assert result[0].contractCreator == "0x" + "b" * 40 + assert result[0].txHash == "0x" + "c" * 64 + + assert result[1].contractAddress == sample_contract_data["contract_2"] + assert result[1].contractCreator == "0x" + "e" * 40 + assert result[1].txHash == "0x" + "f" * 64 + + +def test_get_contract_creator_and_creation_tx_hash_empty_input(session: Session): + """Test with an empty list of contract addresses.""" + result = get_contract_creator_and_creation_tx_hash(session=session, contract_addresses=[]) + assert result == [] + + +def test_get_contract_creator_and_creation_tx_hash_invalid_address(session: Session): + """Test with an invalid contract address.""" + result = get_contract_creator_and_creation_tx_hash(session=session, contract_addresses=["0x" + "f" * 40]) + assert result == [] # No matching contract address + + +def test_get_contract_creator_and_creation_tx_hash_single_address(session: Session, sample_contract_data): + """Test with a single contract address.""" + result = get_contract_creator_and_creation_tx_hash( + session=session, contract_addresses=[sample_contract_data["contract_1"]] + ) + assert len(result) == 1 + assert result[0].contractAddress == sample_contract_data["contract_1"] + assert result[0].contractCreator == "0x" + "b" * 40 + assert result[0].txHash == "0x" + "c" * 64 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_logs.py b/tests/hemera/app/api/routes/developer/es_adapter/test_logs.py new file mode 100644 index 000000000..e611ca025 --- 
/dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_logs.py @@ -0,0 +1,254 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/3 04:01 +# @Author ideal93 +# @File test_logs.py +# @Brief +from datetime import datetime, timedelta +from typing import List + +import pytest +from sqlmodel import Session + +from hemera.app.api.routes.developer.es_adapter.helper import APILogResponse, get_event_logs +from hemera.common.models.base.logs import Logs +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture +def sample_logs(session: Session): + base_time = datetime(2025, 1, 1, 12, 0, 0) + + topicA = hex_str_to_bytes("0x" + "A" * 64) + topicB = hex_str_to_bytes("0x" + "B" * 64) + topicC = hex_str_to_bytes("0x" + "C" * 64) + topicD = hex_str_to_bytes("0x" + "D" * 64) + topicE = hex_str_to_bytes("0x" + "E" * 64) + empty_topic = hex_str_to_bytes("0x" + "0" * 64) + + logs = [ + # Log 1 + Logs( + transaction_hash=hex_str_to_bytes("0x" + "a" * 64), + log_index=0, + address=hex_str_to_bytes("0x1111111111111111111111111111111111111111"), + data=hex_str_to_bytes("0x11"), + block_number=1000, + block_timestamp=base_time, + block_hash=hex_str_to_bytes("0x" + "a" * 64), + topic0=topicA, + topic1=topicB, + topic2=topicC, + topic3=topicD, + ), + # Log 2 + Logs( + transaction_hash=hex_str_to_bytes("0x" + "b" * 64), + log_index=1, + address=hex_str_to_bytes("0x2222222222222222222222222222222222222222"), + data=hex_str_to_bytes("0x11"), + block_number=1001, + block_hash=hex_str_to_bytes("0x" + "b" * 64), + block_timestamp=base_time + timedelta(minutes=1), + topic0=topicA, + topic1=topicE, # Using topicE instead of topicB here + topic2=topicC, + topic3=empty_topic, # Not None, but an "empty" value + ), + # Log 3 + Logs( + transaction_hash=hex_str_to_bytes("0x" + "c" * 64), + log_index=2, + address=hex_str_to_bytes("0x3333333333333333333333333333333333333333"), + data=hex_str_to_bytes("0x11"), + block_number=1002, + 
block_hash=hex_str_to_bytes("0x" + "c" * 64), + block_timestamp=base_time + timedelta(minutes=2), + topic0=hex_str_to_bytes("0x" + "F" * 64), + topic1=topicB, + topic2=empty_topic, + topic3=topicD, + ), + # Log 4 + Logs( + transaction_hash=hex_str_to_bytes("0x" + "d" * 64), + log_index=3, + address=hex_str_to_bytes("0x1111111111111111111111111111111111111111"), + data=hex_str_to_bytes("0x11"), + block_number=1003, + block_hash=hex_str_to_bytes("0x" + "d" * 64), + block_timestamp=base_time + timedelta(minutes=3), + topic0=topicA, + topic1=topicB, + topic2=topicC, + topic3=topicE, + ), + # Log 5 + Logs( + transaction_hash=hex_str_to_bytes("0x" + "e" * 64), + log_index=4, + address=hex_str_to_bytes("0x4444444444444444444444444444444444444444"), + data=hex_str_to_bytes("0x11"), + block_number=1004, + block_hash=hex_str_to_bytes("0x" + "e" * 64), + block_timestamp=base_time + timedelta(minutes=4), + topic0=empty_topic, + topic1=topicB, + topic2=topicC, + topic3=topicD, + ), + # Log 6 (for pagination) + Logs( + transaction_hash=hex_str_to_bytes("0x" + "f" * 64), + log_index=5, + address=hex_str_to_bytes("0x5555555555555555555555555555555555555555"), + data=hex_str_to_bytes("0x11"), + block_number=1005, + block_hash=hex_str_to_bytes("0x" + "f" * 64), + block_timestamp=base_time + timedelta(minutes=5), + topic0=topicA, + topic1=topicB, + topic2=topicC, + topic3=topicD, + ), + ] + + session.add_all(logs) + session.commit() + return logs + + +def test_get_event_logs_no_filters(session: Session, sample_logs): + """ + Test that get_event_logs returns all logs when no topic or address filters are applied. + """ + result: List[APILogResponse] = get_event_logs( + session=session, from_block=1000, to_block=1010, page=1, offset=10, sort_order="asc" + ) + # Expect all 6 logs to be returned + assert len(result) == 6 + + # Verify the order by block_number ascending. 
+ block_numbers = [int(r.blockNumber) for r in result] + assert block_numbers == sorted(block_numbers) + + +def test_get_event_logs_filter_by_topic0(session: Session, sample_logs): + """ + Test filtering by topic0. + """ + # Use topicA (as hex string) for filtering. + topicA_str = "0x" + "A" * 64 + result: List[APILogResponse] = get_event_logs( + session=session, topic0=topicA_str, from_block=1000, to_block=1010, page=1, offset=10, sort_order="asc" + ) + # In our sample, logs 1, 2, 4, and 6 have topic0 equal to topicA. + assert len(result) == 4 + for log in result: + returned_topic0 = log.topics[0] + assert returned_topic0.lower() == topicA_str.lower() + + +def test_get_event_logs_filter_by_address(session: Session, sample_logs): + """ + Test filtering logs by address. + """ + target_address = "0x1111111111111111111111111111111111111111".lower() + result: List[APILogResponse] = get_event_logs( + session=session, address=target_address, from_block=1000, to_block=1010, page=1, offset=10, sort_order="asc" + ) + # Two logs have this address (logs 1 and 4) + assert len(result) == 2 + for log in result: + assert log.address.lower() == target_address + + +def test_get_event_logs_combined_topics_and(session: Session, sample_logs): + """ + Test filtering logs with combined topics using the AND operator. + """ + # We want logs that have topic0 = topicA and topic1 = topicB. + topicA_str = "0x" + "A" * 64 + topicB_str = "0x" + "B" * 64 + result: List[APILogResponse] = get_event_logs( + session=session, + topic0=topicA_str, + topic1=topicB_str, + topic0_1_opr="and", + from_block=1000, + to_block=1010, + page=1, + offset=10, + sort_order="asc", + ) + # Expected logs: 1, 4, and 6 have topic0 equal to topicA and topic1 equal to topicB. 
+ assert len(result) == 3 + for log in result: + assert topicA_str.lower() in log.topics + assert topicB_str.lower() in log.topics + + +def test_get_event_logs_combined_topics_or(session: Session, sample_logs): + """ + Test filtering logs with combined topics using the OR operator. + """ + # Request logs that have topic0 = topicA OR topic1 = topicE. + topicA_str = "0x" + "A" * 64 + topicE_str = "0x" + "E" * 64 + result: List[APILogResponse] = get_event_logs( + session=session, + topic0=topicA_str, + topic1=topicE_str, + topic0_1_opr="or", + from_block=1000, + to_block=1010, + page=1, + offset=10, + sort_order="asc", + ) + # Expected: + # - Logs with topic0 == topicA: logs 1, 2, 4, 6. + # - Logs with topic1 == topicE: log 2 (already counted) and possibly others. + # Overall, we expect 4 logs. + assert len(result) == 4 + for log in result: + assert (topicA_str.lower() in log.topics) or (topicE_str.lower() in log.topics) + + +def test_get_event_logs_pagination_and_sort(session: Session, sample_logs): + """ + Test pagination and sorting. + """ + # Request logs with offset=2 in descending order. + result_page1: List[APILogResponse] = get_event_logs( + session=session, from_block=1000, to_block=1010, page=1, offset=2, sort_order="desc" + ) + result_page2: List[APILogResponse] = get_event_logs( + session=session, from_block=1000, to_block=1010, page=2, offset=2, sort_order="desc" + ) + # Verify pagination: page1 should have 2 logs, page2 should have the next 2. + assert len(result_page1) == 2 + assert len(result_page2) == 2 + + # Verify sort order: descending order means page1 has the highest block numbers. + blocks_page1 = [int(r.blockNumber) for r in result_page1] + blocks_page2 = [int(r.blockNumber) for r in result_page2] + assert blocks_page1[0] >= blocks_page1[1] + # The last block in page1 should be greater than the first block in page2. 
+ assert blocks_page1[-1] > blocks_page2[0] + + +def test_get_event_logs_block_range(session: Session, sample_logs): + """ + Test filtering logs by block range. + """ + # Set block range to only include logs from block 1002 to 1004. + result: List[APILogResponse] = get_event_logs( + session=session, from_block=1002, to_block=1004, page=1, offset=10, sort_order="asc" + ) + block_numbers = sorted([int(r.blockNumber) for r in result]) + assert block_numbers == [1002, 1003, 1004] + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_stats.py b/tests/hemera/app/api/routes/developer/es_adapter/test_stats.py new file mode 100644 index 000000000..ae607dc3c --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_stats.py @@ -0,0 +1,180 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/5 00:33 +# @Author ideal93 +# @File test_stats.py +# @Brief +from datetime import datetime + +import pytest +from sqlmodel import Session, delete + +from hemera.app.api.routes.developer.es_adapter.helper import ( + stats_daily_average_block_size, + stats_daily_average_block_time, + stats_daily_block_count_and_rewards, + stats_daily_network_transaction_fee, + stats_daily_network_utilization, + stats_daily_new_address_count, + stats_daily_transaction_count, +) +from hemera.common.models.stats.daily_addresses_stats import DailyAddressesStats +from hemera.common.models.stats.daily_blocks_stats import DailyBlocksStats +from hemera.common.models.stats.daily_transactions_stats import DailyTransactionsStats + + +@pytest.fixture(autouse=True) +def clean_db(session: Session): + session.exec(delete(DailyTransactionsStats)) + session.exec(delete(DailyAddressesStats)) + session.exec(delete(DailyBlocksStats)) + session.commit() + + +@pytest.fixture +def sample_data(session: Session): + """Create sample data for testing.""" + # Create DailyTransactionsStats sample data + transactions = [ + 
DailyTransactionsStats(block_date=datetime(2025, 1, 1), avg_transaction_fee=1.23, cnt=1), + DailyTransactionsStats(block_date=datetime(2025, 1, 2), avg_transaction_fee=2.34, cnt=1), + ] + session.add_all(transactions) + + # Create DailyAddressesStats sample data + addresses = [ + DailyAddressesStats(block_date=datetime(2025, 1, 1), new_address_cnt=100), + DailyAddressesStats(block_date=datetime(2025, 1, 2), new_address_cnt=150), + ] + session.add_all(addresses) + + # Create DailyBlocksStats sample data + blocks = [ + DailyBlocksStats( + block_date=datetime(2025, 1, 1), avg_size=1000, avg_gas_used_percentage=45.6, cnt=10, block_interval=12.5 + ), + DailyBlocksStats( + block_date=datetime(2025, 1, 2), avg_size=1200, avg_gas_used_percentage=50.7, cnt=12, block_interval=10.8 + ), + ] + session.add_all(blocks) + + session.commit() + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_network_transaction_fee +# ----------------------------------------------------------------------------- +def test_stats_daily_network_transaction_fee(session: Session, sample_data): + result = stats_daily_network_transaction_fee( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].UTCDate == "2025-01-01" + assert result[0].transactionFee == "1.23" + assert result[1].UTCDate == "2025-01-02" + assert result[1].transactionFee == "2.34" + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_new_address_count +# ----------------------------------------------------------------------------- +def test_stats_daily_new_address_count(session: Session, sample_data): + result = stats_daily_new_address_count( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].newAddressCount 
== "100" + assert result[1].newAddressCount == "150" + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_network_utilization +# ----------------------------------------------------------------------------- +def test_stats_daily_network_utilization(session: Session, sample_data): + result = stats_daily_network_utilization( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].networkUtilization == "45.6" + assert result[1].networkUtilization == "50.7" + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_transaction_count +# ----------------------------------------------------------------------------- +def test_stats_daily_transaction_count(session: Session, sample_data): + result = stats_daily_transaction_count( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].transactionCount == "1" + assert result[1].transactionCount == "1" + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_average_block_size +# ----------------------------------------------------------------------------- +def test_stats_daily_average_block_size(session: Session, sample_data): + result = stats_daily_average_block_size( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].averageBlockSize == "1000" + assert result[1].averageBlockSize == "1200" + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_block_count_and_rewards +# ----------------------------------------------------------------------------- +def 
test_stats_daily_block_count_and_rewards(session: Session, sample_data): + result = stats_daily_block_count_and_rewards( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].blockCount == "10" + assert result[1].blockCount == "12" + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_daily_average_block_time +# ----------------------------------------------------------------------------- +def test_stats_daily_average_block_time(session: Session, sample_data): + result = stats_daily_average_block_time( + session=session, + start_date=datetime(2025, 1, 1), + end_date=datetime(2025, 1, 2), + sort_order="asc", + ) + + assert len(result) == 2 + assert result[0].blockTime == "12.5" + assert result[1].blockTime == "10.8" + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_tokens.py b/tests/hemera/app/api/routes/developer/es_adapter/test_tokens.py new file mode 100644 index 000000000..5c65b34f8 --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_tokens.py @@ -0,0 +1,349 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/3 21:43 +# @Author ideal93 +# @File test_tokens.py +# @Brief +from datetime import datetime + +import pytest +from sqlmodel import Session, delete + +from hemera.app.api.routes.developer.es_adapter.helper import ( + account_address_nft_inventory, + account_token_balance, + account_token_balance_with_block_number, + stats_token_supply, + token_holder_list, + token_info, +) +from hemera.common.enumeration.token_type import TokenType +from hemera.common.models.token.nft import NFTDetails +from hemera.common.models.token.token_balances import AddressTokenBalances, CurrentTokenBalances +from hemera.common.models.token.token_id_balances import AddressTokenIdBalances, CurrentTokenIdBalances 
+from hemera.common.models.token.tokens import Tokens +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session: Session): + session.exec(delete(Tokens)) + session.exec(delete(AddressTokenBalances)) + session.exec(delete(CurrentTokenBalances)) + session.exec(delete(AddressTokenIdBalances)) + session.exec(delete(CurrentTokenIdBalances)) + session.commit() + + +@pytest.fixture +def sample_token_data(session: Session): + """Create sample token data for testing.""" + base_time = datetime(2025, 1, 1, 12, 0, 0) + # Create token records + tokens = [ + Tokens( + address=hex_str_to_bytes("0x" + "A" * 40), + name="Sample ERC20", + symbol="SERC20", + total_supply=1000000, + token_type="ERC20", + decimals=18, + ), + Tokens( + address=hex_str_to_bytes("0x" + "B" * 40), + name="Sample ERC721", + symbol="SERC721", + total_supply=1000, + token_type="ERC721", + decimals=None, + token_id=1, + ), + Tokens( + address=hex_str_to_bytes("0x" + "C" * 40), + name="Sample ERC1155", + symbol="SERC1155", + total_supply=5000, + token_type="ERC1155", + decimals=None, + token_id=2, + ), + ] + session.add_all(tokens) + session.commit() + + # Create token balances for address across different block numbers + balances = [ + AddressTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "A" * 40), + balance=1000, + block_number=1000, + block_timestamp=base_time, + ), + AddressTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "A" * 40), + balance=1500, + block_number=1005, # newer block number + block_timestamp=base_time, + ), + AddressTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "B" * 40), + balance=5, + block_number=1000, + block_timestamp=base_time, + ), + AddressTokenIdBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "C" * 40), + token_id=2, + 
balance=10, + block_number=1000, + block_timestamp=base_time, + ), + ] + session.add_all(balances) + session.commit() + + # Current token balances for holders + current_balances = [ + CurrentTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "A" * 40), + balance=1500, + block_number=1005, # latest block for ERC20 + block_timestamp=base_time, + ), + CurrentTokenBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "B" * 40), + balance=5, + block_number=1000, + block_timestamp=base_time, + ), + CurrentTokenIdBalances( + address=hex_str_to_bytes("0x" + "1" * 40), + token_address=hex_str_to_bytes("0x" + "C" * 40), + balance=10, + token_id=2, + block_number=1000, + block_timestamp=base_time, + ), + CurrentTokenBalances( + address=hex_str_to_bytes("0x" + "2" * 40), + token_address=hex_str_to_bytes("0x" + "A" * 40), + balance=500, + block_number=1000, + block_timestamp=base_time, + ), + ] + session.add_all(current_balances) + session.commit() + + return { + "erc20_contract": "0x" + "A" * 40, + "erc721_contract": "0x" + "B" * 40, + "erc1155_contract": "0x" + "C" * 40, + "address": "0x" + "1" * 40, + } + + +# ----------------------------------------------------------------------------- +# Test Cases for stats_token_supply, account_token_balance, and token_info +# ----------------------------------------------------------------------------- +def test_stats_token_supply(session: Session, sample_token_data): + """Test the total supply of tokens.""" + result = stats_token_supply(session, sample_token_data["erc20_contract"]) + assert result == 1000000 # ERC20 total supply is 1000000 + + result = stats_token_supply(session, sample_token_data["erc721_contract"]) + assert result == 1000 # ERC721 total supply is 1000 + + result = stats_token_supply(session, sample_token_data["erc1155_contract"]) + assert result == 5000 # ERC1155 total supply is 5000 + + +def test_account_token_balance(session: 
Session, sample_token_data): + """Test getting the token balance for an address.""" + result = account_token_balance( + session, sample_token_data["erc20_contract"], sample_token_data["address"], TokenType.ERC20 + ) + assert result == "1500" # ERC20 balance for address is 1500 (latest block balance) + + result = account_token_balance( + session, sample_token_data["erc721_contract"], sample_token_data["address"], TokenType.ERC721 + ) + assert result == "5" # ERC721 balance for address is 5 (token_id=1) + + result = account_token_balance( + session, sample_token_data["erc1155_contract"], sample_token_data["address"], TokenType.ERC1155, 2 + ) + assert result == "10" # ERC1155 balance for address is 10 (token_id=2) + + +def test_account_token_balance_with_block_number(session: Session, sample_token_data): + """Test getting token balance with a specific block number.""" + result = account_token_balance_with_block_number( + session, sample_token_data["erc20_contract"], sample_token_data["address"], 1000, TokenType.ERC20 + ) + assert result == "1000" # ERC20 balance at block number 1000 is 1000 + + result = account_token_balance_with_block_number( + session, sample_token_data["erc721_contract"], sample_token_data["address"], 1000, TokenType.ERC721, -1 + ) + assert result == "5" # ERC721 balance for token_id=1 at block number 1000 is 5 + + result = account_token_balance_with_block_number( + session, sample_token_data["erc1155_contract"], sample_token_data["address"], 1000, TokenType.ERC1155, 2 + ) + assert result == "10" # ERC1155 balance for token_id=2 at block number 1000 is 10 + + +def test_current_account_token_balance(session: Session, sample_token_data): + """Test getting current token balance (latest block number).""" + # The latest block for ERC20 has a balance of 1500 + result = account_token_balance( + session, sample_token_data["erc20_contract"], sample_token_data["address"], TokenType.ERC20 + ) + assert result == "1500" # Latest balance from 
CurrentTokenBalances for ERC20 + + result = account_token_balance( + session, sample_token_data["erc721_contract"], sample_token_data["address"], TokenType.ERC721, -1 + ) + assert result == "5" # Latest balance for ERC721 token + + result = account_token_balance( + session, sample_token_data["erc1155_contract"], sample_token_data["address"], TokenType.ERC1155, 2 + ) + assert result == "10" # Latest balance for ERC1155 token + + +def test_token_holder_list(session: Session, sample_token_data): + """Test getting the list of token holders.""" + result = token_holder_list(session, sample_token_data["erc20_contract"], page=1, offset=10, sort_order="desc") + assert len(result) == 2 + assert result[0].TokenHolderAddress == "0x" + "1" * 40 + assert result[1].TokenHolderAddress == "0x" + "2" * 40 + + +def test_token_info(session: Session, sample_token_data): + """Test getting token information by contract address.""" + result = token_info(session, sample_token_data["erc20_contract"]) + assert result + assert result.TokenName == "Sample ERC20" + assert result.TokenSymbol == "SERC20" + assert result.TokenTotalSupply == "1000000" + assert result.TokenType == "ERC20" + assert result.TokenDecimals == "18" + + result = token_info(session, sample_token_data["erc721_contract"]) + assert result + assert result.TokenName == "Sample ERC721" + assert result.TokenSymbol == "SERC721" + assert result.TokenTotalSupply == "1000" + assert result.TokenType == "ERC721" + assert result.TokenDecimals is None + + +@pytest.fixture +def sample_nft_data(session: Session): + """Create sample ERC721 NFT data for testing.""" + base_time = datetime(2025, 1, 1, 12, 0, 0) + + # Create ERC721 token details + nfts = [ + NFTDetails( + token_id="1", token_address=hex_str_to_bytes("0x" + "A" * 40), token_owner=hex_str_to_bytes("0x" + "1" * 40) + ), + NFTDetails( + token_id="2", token_address=hex_str_to_bytes("0x" + "A" * 40), token_owner=hex_str_to_bytes("0x" + "1" * 40) + ), + NFTDetails( + token_id="3", 
token_address=hex_str_to_bytes("0x" + "A" * 40), token_owner=hex_str_to_bytes("0x" + "2" * 40) + ), + ] + session.add_all(nfts) + session.commit() + + return { + "contract_address": "0x" + "a" * 40, + "address_1": "0x" + "1" * 40, + "address_2": "0x" + "2" * 40, + } + + +def test_account_address_nft_inventory(session: Session, sample_nft_data): + """Test fetching NFT inventory for a given address.""" + result = account_address_nft_inventory( + session=session, + contract_address=sample_nft_data["contract_address"], + address=sample_nft_data["address_1"], + page=1, + offset=10, + ) + + assert len(result) == 2 + assert result[0].tokenID == "1" + assert result[1].tokenID == "2" + + +def test_account_address_nft_inventory_empty_address(session: Session): + """Test fetching NFT inventory when no address is provided.""" + result = account_address_nft_inventory( + session=session, contract_address="0x" + "A" * 40, address=None, page=1, offset=10 + ) + assert result == [] + + +def test_account_address_nft_inventory_empty_contract_address(session: Session): + """Test fetching NFT inventory when no contract address is provided.""" + result = account_address_nft_inventory( + session=session, contract_address=None, address="0x" + "1" * 40, page=1, offset=10 + ) + assert result == [] + + +def test_account_address_nft_inventory_pagination(session: Session, sample_nft_data): + """Test pagination when fetching NFT inventory.""" + result_page1 = account_address_nft_inventory( + session=session, + contract_address=sample_nft_data["contract_address"], + address=sample_nft_data["address_1"], + page=1, + offset=2, + ) + result_page2 = account_address_nft_inventory( + session=session, + contract_address=sample_nft_data["contract_address"], + address=sample_nft_data["address_1"], + page=2, + offset=2, + ) + + assert len(result_page1) == 2 + assert len(result_page2) == 0 + + +def test_account_address_nft_inventory_no_tokens(session: Session, sample_nft_data): + """Test when an address has 
only a single NFT for a given contract address.""" + result = account_address_nft_inventory( + session=session, + contract_address=sample_nft_data["contract_address"], + address=sample_nft_data["address_2"], + page=1, + offset=10, + ) + assert len(result) == 1 # Only one token belonging to address_2 + + +def test_account_address_nft_inventory_invalid_contract_address(session: Session): + """Test fetching NFT inventory with invalid contract address.""" + result = account_address_nft_inventory( + session=session, contract_address="0x" + "F" * 40, address="0x" + "1" * 40, page=1, offset=10 + ) + assert result == [] # No matching contract address + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/developer/es_adapter/test_transaction_receipt.py b/tests/hemera/app/api/routes/developer/es_adapter/test_transaction_receipt.py new file mode 100644 index 000000000..f8aeb3953 --- /dev/null +++ b/tests/hemera/app/api/routes/developer/es_adapter/test_transaction_receipt.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/3 03:50 +# @Author ideal93 +# @File test_transaction_receipt.py +# @Brief + +from datetime import datetime + +import pytest +from sqlmodel import Session, delete + +from hemera.app.api.routes.developer.es_adapter.helper import ( + check_contract_execution_status, + check_transaction_receipt_status, +) +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.trace.traces import Traces +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session: Session): + """ + Clean the database before each test by deleting rows from the Traces and Transactions tables. + """ + session.exec(delete(Traces)) + session.exec(delete(Transactions)) + session.commit() + + +@pytest.fixture +def sample_trace_success(session: Session): + """ + Create a sample trace with a successful execution. 
+ In this case, status is non-zero indicating success, so isError should be "0". + """ + # Use a sample transaction hash. + txn_hash = "0x" + "1" * 64 + trace_record = Traces( + transaction_hash=hex_str_to_bytes(txn_hash), + trace_address=[], # matching the query filter + status=1, # success status (non-zero) + error="", # no error message + trace_id="1", + ) + session.add(trace_record) + session.commit() + return txn_hash + + +@pytest.fixture +def sample_trace_failure(session: Session): + """ + Create a sample trace with a failure execution. + Here, status is 0, so isError should be "1" and errDescription should contain the error message. + """ + txn_hash = "0x" + "2" * 64 + trace_record = Traces( + transaction_hash=hex_str_to_bytes(txn_hash), + trace_address=[], + status=0, # indicates failure + error="Contract reverted due to insufficient gas", + trace_id="2", + ) + session.add(trace_record) + session.commit() + return txn_hash + + +@pytest.fixture +def sample_transaction(session: Session): + """ + Create a sample transaction with a receipt_status. + """ + txn_hash = "0x" + "3" * 64 + tx = Transactions( + hash=hex_str_to_bytes(txn_hash), + block_number=1500, + block_timestamp=datetime.utcnow(), + nonce=1, + block_hash=hex_str_to_bytes("0x" + "a" * 64), + input=None, + transaction_index=1, + from_address=hex_str_to_bytes("0x" + "b" * 40), + to_address=hex_str_to_bytes("0x" + "c" * 40), + value=500000, + gas=21000, + gas_price=10000000000, + receipt_status=1, # A sample receipt status (could be 0 or 1) + receipt_contract_address=None, + receipt_cumulative_gas_used=21000, + receipt_gas_used=21000, + ) + session.add(tx) + session.commit() + return txn_hash + + +def test_check_contract_execution_status_success(session: Session, sample_trace_success): + """ + Test that check_contract_execution_status returns a status indicating success. 
+ """ + result = check_contract_execution_status(session, sample_trace_success) + # For a successful trace, status is non-zero so isError should be "0" and error message empty. + assert result is not None + assert result.isError == "0" + assert result.errDescription == "" + + +def test_check_contract_execution_status_failure(session: Session, sample_trace_failure): + """ + Test that check_contract_execution_status returns a status indicating failure with the proper error message. + """ + result = check_contract_execution_status(session, sample_trace_failure) + # For a failed trace, status is 0 so isError should be "1" and error message should match. + assert result is not None + assert result.isError == "1" + assert result.errDescription == "Contract reverted due to insufficient gas" + + +def test_check_contract_execution_status_none(session: Session): + """ + Test that check_contract_execution_status returns None if no trace record is found. + """ + # Use a txn_hash that was not inserted into the Traces table. + missing_txn_hash = "0x" + "f" * 64 + result = check_contract_execution_status(session, missing_txn_hash) + assert result is None + + +def test_check_transaction_receipt_status_found(session: Session, sample_transaction): + """ + Test that check_transaction_receipt_status returns the correct receipt status when the transaction exists. + """ + result = check_transaction_receipt_status(session, sample_transaction) + # The fixture sets receipt_status to 1. + assert result is not None + # The returned status is converted to string. + assert result.status == "1" + + +def test_check_transaction_receipt_status_none(session: Session): + """ + Test that check_transaction_receipt_status returns None when the transaction is not found. 
+ """ + missing_txn_hash = "0x" + "e" * 64 + result = check_transaction_receipt_status(session, missing_txn_hash) + assert result is None + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/explorer/__init__.py b/tests/hemera/app/api/routes/explorer/__init__.py new file mode 100644 index 000000000..63c34ff6c --- /dev/null +++ b/tests/hemera/app/api/routes/explorer/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/20 15:08 +# @Author ideal +# @File __init__.py.py +# @Brief diff --git a/tests/hemera/app/api/routes/explorer/base_test.py b/tests/hemera/app/api/routes/explorer/base_test.py new file mode 100644 index 000000000..1b14ac66f --- /dev/null +++ b/tests/hemera/app/api/routes/explorer/base_test.py @@ -0,0 +1,273 @@ +from datetime import datetime, timedelta + +import pytest +from fastapi.testclient import TestClient +from sqlalchemy import delete + +from hemera.app.main import app +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.token import Tokens +from hemera.common.models.trace.contracts import Contracts +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture(autouse=True) +def clean_db(session): + """Clean database before each test""" + # Order matters due to foreign key constraints + session.exec(delete(Transactions)) + session.exec(delete(Contracts)) + session.exec(delete(Tokens)) + session.exec(delete(Blocks)) + + session.commit() + + +@pytest.mark.serial +@pytest.fixture +def client(engine): + with TestClient(app) as test_client: + yield test_client + + +@pytest.mark.serial +@pytest.fixture +def sample_blocks(clean_db, session): + """Create a set of test blocks""" + blocks = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create 10 consecutive blocks + for i in range(10): + block = Blocks( + number=1000 + i, + 
hash=f"0x{i:064x}".encode(), + timestamp=base_time + timedelta(minutes=i), + ) + blocks.append(block) + session.add(block) + + session.commit() + return blocks + + +@pytest.fixture +def sample_transactions(clean_db, session): + """Create sample transactions for testing""" + transactions = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + for i in range(5): + tx = Transactions( + hash=f"0x{i:064x}".encode(), + block_number=1000 + i, + from_address=f"0x{i:040x}".encode(), + to_address=f"0x{i+1:040x}".encode(), + value=1000000 * (i + 1), + timestamp=base_time + timedelta(minutes=i), + ) + transactions.append(tx) + session.add(tx) + + session.commit() + return transactions + + +@pytest.fixture +def sample_tokens(clean_db, session): + """Create sample tokens for testing""" + tokens = [ + Tokens( + address=f"0x{i:040x}".encode(), + name=f"Token{i}", + symbol=f"TK{i}", + decimals=18, + icon_url=f"https://example.com/token{i}.png", + ) + for i in range(3) + ] + + for token in tokens: + session.add(token) + session.commit() + return tokens + + +# Health Check Tests +@pytest.mark.serial +@pytest.mark.api +def test_health_check_success(client, session): + """Test successful health check with single block""" + test_block = Blocks( + number=12345, + hash=b"0x07e78dcf820fdee6bde4317a41e756acc281d328598183d0028e95f7f84d1bd8", + timestamp=datetime(2024, 1, 1, 12, 0, 0), + ) + session.add(test_block) + session.commit() + + response = client.get("/v1/explorer/health") + + assert response.status_code == 200 + data = response.json() + + assert data["latest_block_number"] == test_block.number + assert data["latest_block_timestamp"] == test_block.timestamp.isoformat() + assert data["status"] == "OK" + + # Verify database pool status + assert "engine_pool_status" in data + assert "read_pool_status" in data + assert "write_pool_status" in data + assert "common_pool_status" in data + + +@pytest.mark.serial +@pytest.mark.api +def test_health_check_no_blocks(client, session): + """Test 
health check when no blocks exist""" + session.exec(delete(Blocks)) + session.commit() + + response = client.get("/v1/explorer/health") + + assert response.status_code == 404 + assert response.json()["detail"] == "No blocks found" + + +# Stats Tests +@pytest.mark.serial +@pytest.mark.api +def test_get_stats_success(client, session, sample_blocks, sample_transactions): + """Test successful stats retrieval""" + + response = client.get("/v1/explorer/stats") + + assert response.status_code == 200 + data = response.json() + + # Verify required fields + assert "total_transactions" in data + assert "transaction_tps" in data + assert "latest_block" in data + assert "avg_block_time" in data + assert isinstance(data["total_transactions"], int) + assert isinstance(data["transaction_tps"], float) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_stats_no_blocks(client, session): + """Test stats endpoint when no blocks exist""" + + session.exec(delete(Blocks)) + session.commit() + + response = client.get("/v1/explorer/stats") + assert response.status_code == 404 + assert response.json()["detail"] == "No blocks found" + + +# Transactions Per Day Tests +@pytest.mark.serial +@pytest.mark.api +def test_transactions_per_day(client, session, sample_transactions): + """Test transactions per day chart data""" + response = client.get("/v1/explorer/charts/transactions_per_day") + + assert response.status_code == 200 + data = response.json() + + assert "title" in data + assert "data" in data + assert isinstance(data["data"], list) + # Verify data structure + if data["data"]: + first_item = data["data"][0] + assert "value" in first_item + assert "count" in first_item + + +# Search Tests +@pytest.mark.serial +@pytest.mark.api +def test_search_by_block_number(client, session, sample_blocks): + """Test search by block number""" + block_number = 1000 + response = client.get(f"/v1/explorer/search?q={block_number}") + + assert response.status_code == 200 + results = response.json() + 
assert len(results) == 1 + assert results[0]["block_number"] == block_number + + +@pytest.mark.serial +@pytest.mark.api +def test_search_by_transaction_hash(client, session, sample_transactions): + """Test search by transaction hash""" + tx_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" + response = client.get(f"/v1/explorer/search?q={tx_hash}") + + assert response.status_code == 200 + results = response.json() + if results: + assert "transaction_hash" in results[0] + + +@pytest.mark.serial +@pytest.mark.api +def test_search_by_token(client, session, sample_tokens): + """Test search by token name or symbol""" + token_query = "Token" + response = client.get(f"/v1/explorer/search?q={token_query}") + + assert response.status_code == 200 + results = response.json() + + for result in results: + assert "token_name" in result + assert "token_symbol" in result + assert "token_address" in result + + +@pytest.mark.serial +@pytest.mark.api +def test_search_empty_query(client): + """Test search with empty query""" + response = client.get("/v1/explorer/search?q=") + assert response.status_code == 422 # Validation error + + +@pytest.mark.serial +@pytest.mark.api +def test_search_invalid_address(client): + """Test search with invalid ethereum address""" + response = client.get("/v1/explorer/search?q=0xinvalid") + assert response.status_code == 200 + assert len(response.json()) == 0 + + +@pytest.mark.serial +@pytest.mark.api +def test_search_contract_address(client, session): + """Test search by contract address""" + # Create a test contract + contract_address = "0x" + "1" * 40 + contract = Contracts( + address=hex_str_to_bytes(contract_address), creator=hex_str_to_bytes("0x" + "2" * 40), created_at=datetime.now() + ) + session.add(contract) + session.commit() + + response = client.get(f"/v1/explorer/search?q={contract_address}") + + assert response.status_code == 200 + results = response.json() + assert len(results) == 1 + assert "wallet_address" in 
 results[0] + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/explorer/block_test.py b/tests/hemera/app/api/routes/explorer/block_test.py new file mode 100644 index 000000000..190122d30 --- /dev/null +++ b/tests/hemera/app/api/routes/explorer/block_test.py @@ -0,0 +1,349 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/24 16:02 +# @Author ideal93 +# @File block_test.py +# @Brief + +from datetime import datetime, timedelta + +import pytest + +from hemera.app.api.routes.helper.block import _get_blocks_by_condition +from hemera.common.models.base.blocks import Blocks + + +@pytest.mark.serial +@pytest.fixture +def sample_blocks(clean_db, session): + """Create a set of test blocks including some reorged blocks""" + blocks = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create 10 consecutive blocks + for i in range(10): + block = Blocks( + number=1000 + i, + hash=bytes.fromhex(f"{i:064x}"), + timestamp=base_time + timedelta(minutes=i), + parent_hash=bytes.fromhex(f"{i-1:064x}") if i > 0 else bytes(32), + gas_limit="15000000", + gas_used=f"{5000000 + i * 100000}", + base_fee_per_gas="1000000000", + miner=bytes.fromhex(f"{i:040x}"), + transactions_count=0, + internal_transactions_count=0, + reorg=False, + ) + if i == 4: + block.transactions_count = 1 + blocks.append(block) + session.add(block) + + reorg_blocks = [ + Blocks( + number=1002, + hash=bytes.fromhex("deadbeef".ljust(64, "0")), + timestamp=base_time + timedelta(minutes=2), + parent_hash=bytes.fromhex(f"{1:064x}"), + gas_limit="15000000", + gas_used="5200000", + base_fee_per_gas="1000000000", + miner=bytes.fromhex("abc".ljust(40, "0")), + transactions_count=0, + internal_transactions_count=0, + reorg=True, + ), + Blocks( + number=1003, + hash=bytes.fromhex("deadbeef2".ljust(64, "0")), + timestamp=base_time + timedelta(minutes=3), + parent_hash=bytes.fromhex("deadbeef".ljust(64, "0")), + gas_limit="15000000", + gas_used="5300000", + 
base_fee_per_gas="1000000000", + miner=bytes.fromhex("def".ljust(40, "0")), + transactions_count=0, + internal_transactions_count=0, + reorg=True, + ), + ] + + for block in reorg_blocks: + session.add(block) + blocks.append(block) + + session.commit() + + return blocks + + +@pytest.mark.serial +@pytest.mark.api +def test_get_blocks_success(client, sample_blocks, session): + """Test successful retrieval of blocks with default pagination""" + response = client.get("/v1/explorer/blocks") + + assert response.status_code == 200 + data = response.json() + + assert data["page"] == 1 + assert data["size"] == 25 + assert data["total"] == 1009 # highest block number + assert len(data["data"]) == 10 # number of sample blocks + + # Verify first block data + first_block = data["data"][0] + assert first_block["number"] == 1009 + assert first_block["hash"] == "0x" + "0" * 63 + "9" + assert "timestamp" in first_block + assert first_block["transaction_count"] == 0 + assert first_block["internal_transaction_count"] == 0 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_blocks_with_pagination(client, sample_blocks, session): + """Test blocks retrieval with custom pagination""" + response = client.get("/v1/explorer/blocks?page=2&size=5") + + assert response.status_code == 200 + data = response.json() + + assert data["page"] == 2 + assert data["size"] == 5 + assert len(data["data"]) == 5 + + # Verify block numbers are in descending order + block_numbers = [block["number"] for block in data["data"]] + assert block_numbers == [1004, 1003, 1002, 1001, 1000] + + +@pytest.mark.serial +@pytest.mark.api +def test_get_blocks_with_transactions(client, sample_blocks, session): + """Test blocks retrieval with transaction counts""" + response = client.get("/v1/explorer/blocks") + + assert response.status_code == 200 + data = response.json() + + # Find a block that should have a transaction + block_with_tx = next(block for block in data["data"] if block["number"] == 1004) + + assert 
block_with_tx["transaction_count"] == 1 + assert block_with_tx["internal_transaction_count"] == 0 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_blocks_invalid_pagination(client, sample_blocks): + """Test blocks retrieval with invalid pagination parameters""" + # Test negative page + response = client.get("/v1/explorer/blocks?page=0") + assert response.status_code == 422 + + # Test negative size + response = client.get("/v1/explorer/blocks?size=0") + assert response.status_code == 422 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_blocks_empty_db(client, clean_db): + """Test blocks retrieval with empty database""" + response = client.get("/v1/explorer/blocks") + + assert response.status_code == 200 + data = response.json() + + assert data["total"] == 0 + assert len(data["data"]) == 0 + assert data["page"] == 1 + assert data["size"] == 25 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_blocks_response_structure(client, sample_blocks): + """Test the structure of block response data""" + response = client.get("/v1/explorer/blocks") + + assert response.status_code == 200 + data = response.json() + + # Check first block has all required fields + first_block = data["data"][0] + required_fields = { + "hash", + "number", + "timestamp", + "parent_hash", + "gas_limit", + "gas_used", + "base_fee_per_gas", + "miner", + "transaction_count", + "internal_transaction_count", + } + + assert all(field in first_block for field in required_fields) + assert isinstance(first_block["number"], int) + assert isinstance(first_block["hash"], str) + assert isinstance(first_block["transaction_count"], int) + + +def test_get_blocks_exclude_reorg(session, sample_blocks): + blocks = _get_blocks_by_condition(session, filter_condition=Blocks.reorg == False) + + assert len(blocks) == 10 + assert all(not block.reorg for block in blocks) + + blocks_at_1002 = _get_blocks_by_condition( + session, filter_condition=(Blocks.number == 1002) & (Blocks.reorg == False) + ) + assert 
len(blocks_at_1002) == 1 + assert not blocks_at_1002[0].reorg + assert blocks_at_1002[0].hash != ("0x" + "deadbeef".ljust(64, "0")).encode() + + +def test_blocks_api_exclude_reorg(client, session, sample_blocks): + response = client.get("/v1/explorer/blocks?page=1&size=10") + assert response.status_code == 200 + data = response.json() + + assert len(data["data"]) == 10 + assert data["page"] == 1 + assert data["size"] == 10 + assert data["total"] == 1009 + + block_numbers = [block["number"] for block in data["data"]] + assert 1002 in block_numbers + + block_1002 = next(block for block in data["data"] if block["number"] == 1002) + assert block_1002["hash"] != "0x" + "deadbeef".ljust(64, "0") + + +@pytest.fixture +def sample_blocks_details(clean_db, session): + """Create sample blocks for testing block detail API + + Creates: + - A sequence of 3 blocks with different timestamps + - A special block with known hash for testing hash-based queries + """ + base_time = datetime(2024, 1, 1, 12, 0, 0) + blocks = [] + + # Create regular blocks + for i in range(3): + block = Blocks( + number=1000 + i, + hash=bytes.fromhex(f"{i:064x}"), + parent_hash=bytes.fromhex(f"{i-1:064x}") if i > 0 else bytes(32), + timestamp=base_time + timedelta(seconds=12 * i), + gas_limit="15000000", + gas_used=f"{5000000 + i * 100000}", + base_fee_per_gas="1000000000", + miner=bytes.fromhex(f"{i:040x}"), + transactions_count=i, # Different tx counts + internal_transactions_count=i * 2, # Different internal tx counts + reorg=False, + ) + blocks.append(block) + session.add(block) + + # Create a block with known hash for testing + known_hash = "deadbeef" * 8 # 32 bytes + special_block = Blocks( + number=2000, + hash=bytes.fromhex(known_hash), + parent_hash=bytes.fromhex("f" * 64), + timestamp=base_time + timedelta(minutes=5), + gas_limit="15000000", + gas_used="5000000", + base_fee_per_gas="1000000000", + miner=bytes.fromhex("a" * 40), + transactions_count=10, + internal_transactions_count=5, + 
reorg=False, + ) + blocks.append(special_block) + session.add(special_block) + + session.commit() + return blocks + + +def test_get_block_detail_by_number(client, session, sample_blocks_details): + """Test getting block details using block number""" + response = client.get("/v1/explorer/block/1001") + assert response.status_code == 200 + + data = response.json() + assert data["number"] == 1001 + assert data["transaction_count"] == 1 + assert data["internal_transaction_count"] == 2 + assert data["seconds_since_last_block"] == 12.0 + assert not data["is_last_block"] + + +def test_get_block_detail_by_hash(client, session, sample_blocks_details): + """Test getting block details using block hash""" + known_hash = "0x" + "deadbeef" * 8 + response = client.get(f"/v1/explorer/block/{known_hash}") + assert response.status_code == 200 + + data = response.json() + assert data["number"] == 2000 + assert data["transaction_count"] == 10 + assert data["internal_transaction_count"] == 5 + + +def test_get_latest_block(client, session, sample_blocks_details): + """Test getting the latest block and verifying is_last_block flag""" + response = client.get("/v1/explorer/block/2000") + assert response.status_code == 200 + + data = response.json() + assert data["is_last_block"] == True + + +def test_get_block_not_found(client, session, sample_blocks_details): + """Test response when block is not found""" + # Test with non-existent block number + response = client.get("/v1/explorer/block/9999") + assert response.status_code == 404 + + # Test with non-existent block hash + response = client.get("/v1/explorer/block/0x" + "a" * 64) + assert response.status_code == 404 + + +def test_get_block_invalid_input(client, session, sample_blocks_details): + """Test response with invalid input formats""" + # Invalid hash length + response = client.get("/v1/explorer/block/0x123") + assert response.status_code == 400 + + # Invalid hex string + response = client.get("/v1/explorer/block/0x" + "g" * 64) + 
assert response.status_code == 400 + + # Invalid format + response = client.get("/v1/explorer/block/not-a-block") + assert response.status_code == 400 + + +def test_get_block_with_zero_counts(client, session, sample_blocks_details): + """Test block with zero transaction counts""" + response = client.get("/v1/explorer/block/1000") + assert response.status_code == 200 + + data = response.json() + assert data["transaction_count"] == 0 + assert data["internal_transaction_count"] == 0 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/explorer/transaction_test.py b/tests/hemera/app/api/routes/explorer/transaction_test.py new file mode 100644 index 000000000..5e39098dd --- /dev/null +++ b/tests/hemera/app/api/routes/explorer/transaction_test.py @@ -0,0 +1,868 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/24 16:02 +# @Author ideal93 +# @File transaction_test.py +# @Brief + +from datetime import datetime, timedelta +from decimal import Decimal + +import pytest + +from hemera.app.core.config import settings +from hemera.common.enumeration.txn_type import AddressNftTransferType, AddressTokenTransferType, AddressTransactionType +from hemera.common.models.address.address_nft_transfers import AddressNftTransfers +from hemera.common.models.address.address_token_transfers import AddressTokenTransfers +from hemera.common.models.address.address_transactions import AddressTransactions +from hemera.common.models.base.blocks import Blocks +from hemera.common.models.base.transactions import Transactions +from hemera.common.models.token import Tokens +from hemera.common.models.token_transfers import ( + ERC20TokenTransfers, + ERC721TokenTransfers, + ERC1155TokenTransfers, + NftTransfers, +) +from hemera.common.models.trace.contracts import Contracts +from hemera.common.models.trace.traces import ContractInternalTransactions +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture +def 
sample_addresses(): + return { + "sender": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "receiver": "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "token": "0xcccccccccccccccccccccccccccccccccccccccc", + } + + +@pytest.fixture +def sample_contracts(session): + contracts = [ + Contracts( + address=hex_str_to_bytes("0x0000000000000000000000000000000000000008"), + name="Uniswap V2 Router", + contract_creator=hex_str_to_bytes("0x5b6c7b13a2b82ed76f48230be0c4a13f94160c5e"), + deployed_code=b"sample bytecode 1", + block_number=12345678, + is_verified=True, + ), + Contracts( + address=hex_str_to_bytes("0x0000000000000000000000000000000000000009"), + name="Uniswap Token", + contract_creator=hex_str_to_bytes("0x4d812c19d95e76fd0194ce3c0ba2d9c04584c3e8"), + deployed_code=b"sample bytecode 2", + block_number=12345679, + is_verified=False, + ), + ] + + for contract in contracts: + session.add(contract) + session.commit() + + return contracts + + +@pytest.fixture +def sample_tokens(session, sample_addresses): + tokens = [ + Tokens( + address=hex_str_to_bytes(sample_addresses["token"]), + name="Wrapped Ether", + symbol="WETH", + token_type="ERC20", + decimals=18, + price=Decimal("1000"), + previous_price=Decimal("900"), + logo_url="https://example.com/logo.png", + market_cap=Decimal("1000000"), + on_chain_market_cap=Decimal("2000000"), + ) + ] + session.add(tokens[0]) + session.commit() + return tokens + + +@pytest.fixture +def erc20_token_transfers(session, sample_addresses, sample_tokens): + """Create sample ERC20 token transfers""" + now = datetime.utcnow() + transfers = [ + ERC20TokenTransfers( + transaction_hash=hex_str_to_bytes("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + value=Decimal("1000000000000000000"), # 1 
token + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1"), + ), + ERC20TokenTransfers( + transaction_hash=hex_str_to_bytes("0x2234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + value=Decimal("2000000000000000000"), # 2 tokens + log_index=1, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def erc721_token_transfers(session, sample_addresses): + """Create sample ERC721 token transfers""" + now = datetime.utcnow() + transfers = [ + ERC721TokenTransfers( + transaction_hash=hex_str_to_bytes("0x3234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("1"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb2"), + ), + ERC721TokenTransfers( + transaction_hash=hex_str_to_bytes("0x4234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=2), + from_address=hex_str_to_bytes(sample_addresses["receiver"]), + to_address=hex_str_to_bytes(sample_addresses["sender"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("2"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb3"), + ), + ] + + for transfer in transfers: + 
session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def erc1155_token_transfers(session, sample_addresses): + """Create sample ERC1155 token transfers""" + now = datetime.utcnow() + transfers = [ + ERC1155TokenTransfers( + transaction_hash=hex_str_to_bytes("0x5234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("1"), + value=Decimal("5"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb4"), + ), + ERC1155TokenTransfers( + transaction_hash=hex_str_to_bytes("0x6234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("2"), + value=Decimal("10"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb5"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def nft_transfers(session, sample_addresses): + """Create sample NFT transfers in unified table""" + now = datetime.utcnow() + transfers = [ + NftTransfers( + transaction_hash=hex_str_to_bytes("0x7234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("1"), + value=None, # ERC721 + log_index=0, + 
block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb6"), + ), + NftTransfers( + transaction_hash=hex_str_to_bytes("0x8234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("2"), + value=Decimal("5"), # ERC1155 + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb7"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def address_token_transfers(session, sample_addresses): + """Create sample address token transfers""" + now = datetime.utcnow() + transfers = [ + AddressTokenTransfers( + address=hex_str_to_bytes(sample_addresses["sender"]), + block_number=21436149, + block_timestamp=now, + transaction_hash=hex_str_to_bytes("0x9234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["receiver"]), + transfer_type=AddressTokenTransferType.SENDER.value, + value=Decimal("1000000000000000000"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb8"), + ), + AddressTokenTransfers( + address=hex_str_to_bytes(sample_addresses["receiver"]), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + transaction_hash=hex_str_to_bytes("0xa234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["sender"]), + transfer_type=AddressTokenTransferType.RECEIVER.value, + value=Decimal("500000000000000000"), + log_index=0, + 
block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb9"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def address_nft_transfers(session, sample_addresses): + """Create sample address NFT transfers""" + now = datetime.utcnow() + transfers = [ + AddressNftTransfers( + address=hex_str_to_bytes(sample_addresses["sender"]), + block_number=21436149, + block_timestamp=now, + transaction_hash=hex_str_to_bytes("0xb234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["receiver"]), + transfer_type=AddressNftTransferType.SENDER.value, + token_id=Decimal("1"), + value=None, # ERC721 + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb10"), + ), + AddressNftTransfers( + address=hex_str_to_bytes(sample_addresses["receiver"]), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + transaction_hash=hex_str_to_bytes("0xc234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["sender"]), + transfer_type=AddressNftTransferType.RECEIVER.value, + token_id=Decimal("2"), + value=Decimal("5"), # ERC1155 + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb11"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.mark.serial +@pytest.fixture +def sample_internal_transactions(clean_db, sample_contracts, session): + """Create a set of test internal transactions""" + transactions = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create 10 internal transactions + for i in range(10): + tx = ContractInternalTransactions( + 
trace_id=f"trace_{i}", + trace_type="call", + call_type="call", + value=str(1000000000000000000 * (i + 1)), # 1-10 ETH + error=None if i < 8 else "Reverted", + status=1 if i < 8 else 0, + block_number=1000 + i, + block_timestamp=base_time + timedelta(minutes=i), + transaction_hash=bytes.fromhex(f"{i:064x}"), + from_address=bytes.fromhex(f"{i:040x}"), + to_address=bytes.fromhex(f"{i + 1:040x}"), + ) + transactions.append(tx) + session.add(tx) + + # Create some transactions with same block number for block filtering test + for i in range(3): + tx = ContractInternalTransactions( + trace_id=f"trace_block_{i}", + trace_type="call", + call_type="call", + value=str(500000000000000000 * (i + 1)), # 0.5-1.5 ETH + error=None, + status=1, + block_number=1005, # Same block number + block_timestamp=base_time + timedelta(minutes=10, seconds=i * 15), + transaction_hash=bytes.fromhex(f"aa{i:062x}"), + from_address=bytes.fromhex(f"bb{i:038x}"), + to_address=bytes.fromhex(f"cc{i:038x}"), + ) + transactions.append(tx) + session.add(tx) + + session.commit() + return transactions + + +@pytest.mark.serial +@pytest.fixture +def sample_transactions(clean_db, session): + """Create a set of test transactions""" + transactions = [] + base_time = datetime(2024, 1, 1, 12, 0, 0) + + # Create blocks for transactions + blocks = [] + for i in range(5): + block = Blocks( + number=1000 + i, + hash=bytes.fromhex(f"bb{i:062x}"), + timestamp=base_time + timedelta(minutes=i), + parent_hash=bytes.fromhex(f"{i - 1:064x}") if i > 0 else bytes(32), + gas_limit="15000000", + gas_used=f"{5000000 + i * 100000}", + base_fee_per_gas="1000000000", + miner=bytes.fromhex(f"{i:040x}"), + transactions_count=2, # Each block has 2 transactions + internal_transactions_count=0, + reorg=False, + ) + blocks.append(block) + session.add(block) + + # Create 2 transactions for each block + for block in blocks: + for j in range(2): + tx = Transactions( + hash=bytes.fromhex(f"aa{block.number:012x}{j:050x}"), + 
block_number=block.number, + block_hash=block.hash, + block_timestamp=block.timestamp, + transaction_index=j, + from_address=bytes.fromhex(f"{j:040x}"), + to_address=bytes.fromhex(f"{j + 1:040x}"), + value=str(1000000000000000000 * (j + 1)), # 1-2 ETH + gas="21000", + gas_price="1000000000", + transaction_type=0, + method_id=None, + input=None, + max_fee_per_gas=None, + max_priority_fee_per_gas=None, + receipt_status=1, + receipt_gas_used="21000", + ) + transactions.append(tx) + + address_transaction_1 = AddressTransactions( + address=bytes.fromhex(f"{j:040x}"), + block_number=block.number, + transaction_index=j, + block_timestamp=block.timestamp, + transaction_hash=bytes.fromhex(f"aa{block.number:012x}{j:050x}"), + block_hash=block.hash, + txn_type=AddressTransactionType.SENDER.value, + related_address=bytes.fromhex(f"{j + 1:040x}"), + value=Decimal("1000000000000000000"), + transaction_fee=Decimal("21000") * Decimal("21000"), + receipt_status=1, + method=None, + ) + address_transaction_2 = AddressTransactions( + address=bytes.fromhex(f"{j + 1:040x}"), + block_number=block.number, + transaction_index=j, + block_timestamp=block.timestamp, + transaction_hash=bytes.fromhex(f"aa{block.number:012x}{j:050x}"), + block_hash=block.hash, + txn_type=AddressTransactionType.RECEIVER.value, + related_address=bytes.fromhex(f"{j:040x}"), + value=Decimal("1000000000000000000"), + transaction_fee=Decimal("21000") * Decimal("21000"), + receipt_status=1, + method=None, + ) + session.add(address_transaction_1) + session.add(address_transaction_2) + session.add(tx) + + session.commit() + return transactions + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_success(client, sample_internal_transactions, session): + """Test successful retrieval of internal transactions with default pagination""" + response = client.get("/v1/explorer/internal_transactions") + print(response.json()) + assert response.status_code == 200 + data = response.json() + + assert 
data["page"] == 1 + assert data["size"] == 25 + assert data["total"] == 13 # 10 regular + 3 same block transactions + assert len(data["data"]) == 13 + + # Verify first transaction data + first_tx = data["data"][0] + assert first_tx["trace_id"] == "trace_9" + assert first_tx["trace_type"] == "call" + assert first_tx["from_addr"]["is_contract"] == True + assert first_tx["to_addr"]["is_contract"] == False + assert first_tx["display_value"] == "10" + assert first_tx["value"] == "10000000000000000000" + assert first_tx["status"] == 0 + assert first_tx["error"] == "Reverted" + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_with_pagination(client, sample_internal_transactions, session): + """Test internal transactions retrieval with custom pagination""" + response = client.get("/v1/explorer/internal_transactions?page=2&size=5") + + assert response.status_code == 200 + data = response.json() + + assert data["page"] == 2 + assert data["size"] == 5 + assert len(data["data"]) == 5 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_by_address(client, sample_internal_transactions, session): + """Test internal transactions retrieval filtered by address""" + # Test with 'from' address + address = "0x" + "0" * 39 + "1" # First transaction's from_address + response = client.get(f"/v1/explorer/internal_transactions?address={address}") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) > 0 + assert any(tx["from_address"] == address for tx in data["data"]) + + # Test with 'to' address + address = "0x" + "0" * 39 + "2" # First transaction's to_address + response = client.get(f"/v1/explorer/internal_transactions?address={address}") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) > 0 + assert any(tx["to_address"] == address for tx in data["data"]) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_by_block(client, 
sample_internal_transactions, session): + """Test internal transactions retrieval filtered by block number""" + response = client.get("/v1/explorer/internal_transactions?block=1005") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) == 4 # 1 regular + 3 same block transactions + assert all(tx["block_number"] == 1005 for tx in data["data"]) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_invalid_pagination(client, sample_internal_transactions): + """Test internal transactions retrieval with invalid pagination parameters""" + # Test negative page + response = client.get("/v1/explorer/internal_transactions?page=0") + assert response.status_code == 422 + + # Test negative size + response = client.get("/v1/explorer/internal_transactions?size=0") + assert response.status_code == 422 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_empty_db(client, clean_db): + """Test internal transactions retrieval with empty database""" + response = client.get("/v1/explorer/internal_transactions") + + assert response.status_code == 200 + data = response.json() + + assert data["total"] == 0 + assert len(data["data"]) == 0 + assert data["page"] == 1 + assert data["size"] == 25 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_response_structure(client, sample_internal_transactions): + """Test the structure of internal transaction response data""" + response = client.get("/v1/explorer/internal_transactions") + + assert response.status_code == 200 + data = response.json() + + # Check first transaction has all required fields + first_tx = data["data"][0] + required_fields = { + "trace_id", + "trace_type", + "call_type", + "value", + "error", + "status", + "block_number", + "block_timestamp", + "transaction_hash", + "from_address", + "to_address", + "from_addr", + "to_addr", + "display_value", + } + + assert all(field in first_tx for field in required_fields) + 
assert isinstance(first_tx["trace_id"], str) + assert isinstance(first_tx["block_number"], int) + assert isinstance(first_tx["status"], int) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_internal_transactions_max_limit(client, sample_internal_transactions): + """Test internal transactions retrieval with pagination exceeding max limit""" + max_page = settings.MAX_INTERNAL_TRANSACTION // 25 + 1 + response = client.get(f"/v1/explorer/internal_transactions?page={max_page}&size=25") + + assert response.status_code == 400 + assert str(settings.MAX_INTERNAL_TRANSACTION) in response.json()["detail"] + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_success(client, sample_transactions, session): + """Test successful retrieval of transactions with default pagination""" + response = client.get("/v1/explorer/transactions") + + assert response.status_code == 200 + data = response.json() + + assert data["page"] == 1 + assert data["size"] == 25 + assert data["total"] == 10 # 5 blocks * 2 transactions + assert len(data["data"]) == 10 + + # Verify first transaction data + first_tx = data["data"][0] + assert first_tx["block_number"] == 1004 # Latest block + assert isinstance(first_tx["value"], str) + assert isinstance(first_tx["transaction_fee"], str) + assert isinstance(first_tx["value_usd"], str) + assert isinstance(first_tx["transaction_fee_usd"], str) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_by_block(client, sample_transactions, session): + """Test transactions retrieval filtered by block""" + # Test by block number + response = client.get("/v1/explorer/transactions?block=1000") + + assert response.status_code == 200 + data = response.json() + assert data["total"] == 2 + assert len(data["data"]) == 2 + assert all(tx["block_number"] == 1000 for tx in data["data"]) + + # Test by block hash + block_hash = "0x" + "bb" + "0" * 62 + response = client.get(f"/v1/explorer/transactions?block={block_hash}") + + assert 
response.status_code == 200 + data = response.json() + assert data["total"] == 2 + assert len(data["data"]) == 2 + assert all(tx["block_number"] == 1000 for tx in data["data"]) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_by_address(client, sample_transactions, session): + """Test transactions retrieval filtered by address""" + # Test with 'from' address + address = "0x" + "0" * 40 + response = client.get(f"/v1/explorer/transactions?address={address}") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) > 0 + assert any(tx["from_address"] == address for tx in data["data"]) + + # Test with 'to' address + address = "0x" + "0" * 39 + "1" + response = client.get(f"/v1/explorer/transactions?address={address}") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) > 0 + assert any(tx["to_address"] == address for tx in data["data"]) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_by_date(client, sample_transactions, session): + """Test transactions retrieval filtered by date""" + response = client.get("/v1/explorer/transactions?date=20240101") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) > 0 + + first_tx = data["data"][0] + tx_date = datetime.fromisoformat(first_tx["block_timestamp"]).strftime("%Y%m%d") + assert tx_date == "20240101" + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_invalid_block(client, sample_transactions): + """Test transactions retrieval with invalid block parameter""" + # Test invalid block number + response = client.get("/v1/explorer/transactions?block=9999999") + assert response.status_code == 400 + + # Test invalid block hash + response = client.get("/v1/explorer/transactions?block=0xinvalid") + assert response.status_code == 400 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_invalid_date(client, sample_transactions): + """Test 
transactions retrieval with invalid date parameter""" + # Test invalid date format + response = client.get("/v1/explorer/transactions?date=2024-01-01") + assert response.status_code == 400 + + # Test invalid date value + response = client.get("/v1/explorer/transactions?date=20241301") + assert response.status_code == 400 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_with_pagination(client, sample_transactions, session): + """Test transactions retrieval with custom pagination""" + response = client.get("/v1/explorer/transactions?page=2&size=3") + + assert response.status_code == 200 + data = response.json() + + assert data["page"] == 2 + assert data["size"] == 3 + assert len(data["data"]) == 3 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_max_limit(client, sample_transactions, session): + """Test transactions retrieval with pagination exceeding max limit""" + max_page = settings.MAX_TRANSACTION // 25 + 1 + response = client.get(f"/v1/explorer/transactions?page={max_page}&size=25") + + assert response.status_code == 400 + assert str(settings.MAX_TRANSACTION) in response.json()["detail"] + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transactions_response_structure(client, sample_transactions): + """Test the structure of transaction response data""" + response = client.get("/v1/explorer/transactions") + + assert response.status_code == 200 + data = response.json() + + # Check required response fields + required_response_fields = {"data", "total", "max_display", "page", "size"} + assert all(field in data for field in required_response_fields) + + # Check transaction data structure + first_tx = data["data"][0] + required_tx_fields = { + "hash", + "block_number", + "block_timestamp", + "transaction_index", + "from_address", + "from_addr", + "to_address", + "to_addr", + "method_id", + "receipt_status", + "transaction_fee", + "transaction_fee_usd", + "value", + "value_usd", + } + assert all(field in first_tx for 
field in required_tx_fields) + assert first_tx["hash"].startswith("0x") + assert isinstance(first_tx["block_number"], int) + assert isinstance(first_tx["value"], str) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transaction_token_transfers_success( + client, sample_addresses, erc20_token_transfers, erc721_token_transfers, erc1155_token_transfers, session +): + """Test successful retrieval of token transfers for a transaction""" + # Test with a transaction that has ERC20 transfers + tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + response = client.get(f"/v1/explorer/transaction/{tx_hash}/token_transfers") + + assert response.status_code == 200 + data = response.json() + assert data["total"] > 0 + + # Verify transfer data structure + first_transfer = data["data"][0] + assert first_transfer["transaction_hash"].startswith("0x") + assert isinstance(first_transfer["log_index"], int) + assert first_transfer["from_address"].startswith("0x") + assert first_transfer["to_address"].startswith("0x") + assert "from_addr" in first_transfer + assert "to_addr" in first_transfer + assert first_transfer["token_info"]["type"] == "ERC20" + + # Test with a transaction that has ERC721 transfers + tx_hash = "0x3234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + response = client.get(f"/v1/explorer/transaction/{tx_hash}/token_transfers") + + assert response.status_code == 200 + data = response.json() + assert data["total"] > 0 + assert data["data"][0]["token_type"] == "ERC721" + assert isinstance(data["data"][0]["token_id"], str) + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transaction_token_transfers_empty(client): + """Test token transfers for transaction with no transfers""" + tx_hash = "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + response = client.get(f"/v1/explorer/transaction/{tx_hash}/token_transfers") + + assert response.status_code == 200 + data = response.json() + assert 
data["total"] == 0 + assert len(data["data"]) == 0 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transaction_token_transfers_invalid_hash(client): + """Test token transfers with invalid transaction hash""" + # Test with invalid hex + response = client.get("/v1/explorer/transaction/invalid_hash/token_transfers") + + assert response.status_code == 422 + + # Test with wrong length + response = client.get("/v1/explorer/transaction/0x123/token_transfers") + assert response.status_code == 422 + + +@pytest.mark.serial +@pytest.mark.api +def test_get_transaction_token_transfers_response_structure(client, erc20_token_transfers, session): + """Test the structure of token transfers response data""" + tx_hash = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + response = client.get(f"/v1/explorer/transaction/{tx_hash}/token_transfers") + + assert response.status_code == 200 + data = response.json() + + # Check response structure + assert "total" in data + assert "data" in data + assert isinstance(data["total"], int) + assert isinstance(data["data"], list) + + # If we have transfers, check their structure + if data["data"]: + transfer = data["data"][0] + required_fields = { + "transaction_hash", + "log_index", + "from_address", + "to_address", + "token_id", + "value", + "token_type", + "token_address", + "from_addr", + "to_addr", + } + assert all(field in transfer for field in required_fields) + + # Check field types + assert transfer["transaction_hash"].startswith("0x") + assert isinstance(transfer["log_index"], int) + assert transfer["from_address"].startswith("0x") + assert transfer["to_address"].startswith("0x") + assert isinstance(transfer["token_type"], str) + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/__init__.py b/tests/hemera/app/api/routes/utils/__init__.py new file mode 100644 index 000000000..d7a5f949d --- /dev/null +++ b/tests/hemera/app/api/routes/utils/__init__.py 
@@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/19 19:03 +# @Author will +# @File __init__.py +# @Brief diff --git a/tests/hemera/app/api/routes/utils/block_test.py b/tests/hemera/app/api/routes/utils/block_test.py new file mode 100644 index 000000000..9747875a1 --- /dev/null +++ b/tests/hemera/app/api/routes/utils/block_test.py @@ -0,0 +1,187 @@ +import pytest + +from hemera.app.api.routes.helper.block import * +from hemera.app.api.routes.helper.block import ( + _get_block_by_hash, + _get_block_by_number, + _get_blocks_by_condition, + _get_last_block, +) +from hemera.common.models.base.blocks import Blocks +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture +def sample_blocks(session): + blocks = [ + Blocks( + number=21436149, + hash=hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379"), + parent_hash=hex_str_to_bytes("0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434"), + timestamp=datetime.utcnow(), + ), + Blocks( + number=21436148, + hash=hex_str_to_bytes("0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434"), + parent_hash=hex_str_to_bytes("0x5ad9a7c932709efac7d0e0d28c762bfde1737167f6eb51572f6797acece4c957"), + timestamp=datetime.utcnow() - timedelta(seconds=12), + ), + Blocks( + number=21436147, + hash=hex_str_to_bytes("0x5ad9a7c932709efac7d0e0d28c762bfde1737167f6eb51572f6797acece4c957"), + parent_hash=hex_str_to_bytes("0x5ad9a7c932709efac7d0e0d28c762bfde1737167f6eb51572f6797acece4c957"), + timestamp=datetime.utcnow() - timedelta(seconds=24), + ), + ] + + for block in blocks: + session.add(block) + session.commit() + + return blocks + + +def test_get_last_block(session, sample_blocks): + block = _get_last_block(session) + assert block is not None + assert block.number == 21436149 + assert block.hash == hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379") + + block = _get_last_block(session, 
columns=["number", "hash"]) + assert block.number == 21436149 + assert block.hash == hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379") + with pytest.raises(AttributeError): + _ = block.timestamp + + +def test_get_block_by_number(session, sample_blocks): + block = _get_block_by_number(session, 21436148) + assert block is not None + assert block.number == 21436148 + assert block.hash == hex_str_to_bytes("0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434") + + block = _get_block_by_number(session, 999) + assert block is None + + block_number = _get_block_by_number(session, 21436148, columns=["number"]) + assert block_number == 21436148 + + +def test_get_block_by_hash(session, sample_blocks): + block = _get_block_by_hash(session, "0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434") + assert block is not None + assert block.number == 21436148 + assert block.hash == hex_str_to_bytes("0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434") + + block = _get_block_by_hash(session, "0xffff") + assert block is None + + with pytest.raises(ValueError): + _get_block_by_hash(session, "invalid_hash") + + +def test_get_blocks_by_condition(session, sample_blocks): + blocks = _get_blocks_by_condition(session) + assert len(blocks) == 3 + assert blocks[0].number == 21436149 + + blocks = _get_blocks_by_condition(session, filter_condition=(Blocks.number > 21436147)) + assert len(blocks) == 2 + assert all(b.number > 21436147 for b in blocks) + + blocks = _get_blocks_by_condition(session, filter_condition=Blocks.number.between(21436147, 21436149)) + assert len(blocks) == 3 + + # Test hash in condition + target_hashes = [ + hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379"), + hex_str_to_bytes("0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434"), + ] + blocks = _get_blocks_by_condition(session, filter_condition=Blocks.hash.in_(target_hashes)) + assert 
len(blocks) == 2 + assert all(block.hash in target_hashes for block in blocks) + + # Test timestamp between condition + first_block_time = sample_blocks[0].timestamp + last_block_time = sample_blocks[-1].timestamp + blocks = _get_blocks_by_condition( + session, filter_condition=Blocks.timestamp.between(last_block_time, first_block_time) + ) + assert len(blocks) == 3 + assert all(last_block_time <= block.timestamp <= first_block_time for block in blocks) + + blocks = _get_blocks_by_condition(session, limit=2) + assert len(blocks) == 2 + assert blocks[0].number == 21436149 + + blocks = _get_blocks_by_condition(session, offset=1, limit=1) + assert len(blocks) == 1 + assert blocks[0].number == 21436148 + + blocks = _get_blocks_by_condition(session, columns="number") + assert len(blocks) == 3 + with pytest.raises(AttributeError): + _ = blocks[0].hash + + +def test_transaction_isolation(session, sample_blocks): + with session.begin(): + new_block = Blocks( + number=21436150, + hash=hex_str_to_bytes("0xdef0"), + parent_hash=hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379"), + timestamp=datetime.utcnow(), + ) + session.add(new_block) + + last_block = _get_last_block(session) + assert last_block.number == 21436150 + + session.rollback() + + last_block = _get_last_block(session) + assert last_block.number == 21436149 + + +def test_get_blocks_default_order(session, sample_blocks): + blocks = _get_blocks_by_condition(session) + assert len(blocks) == 3 + assert [b.number for b in blocks] == [21436149, 21436148, 21436147] + + +def test_get_blocks_number_asc(session, sample_blocks): + blocks = _get_blocks_by_condition(session, order_by=Blocks.number.asc()) + assert len(blocks) == 3 + assert [b.number for b in blocks] == [21436147, 21436148, 21436149] + + +def test_get_blocks_timestamp_desc(session, sample_blocks): + blocks = _get_blocks_by_condition(session, order_by=Blocks.timestamp.desc()) + assert len(blocks) == 3 + assert blocks[0].number 
== 21436149 + assert blocks[1].number == 21436148 + assert blocks[2].number == 21436147 + + +def test_get_blocks_multiple_order(session, sample_blocks): + blocks = _get_blocks_by_condition(session, order_by=[Blocks.timestamp.asc(), Blocks.number.desc()]) + assert len(blocks) == 3 + assert blocks[0].number == 21436147 + assert blocks[1].number == 21436148 + assert blocks[2].number == 21436149 + + +def test_get_blocks_columns_with_order(session, sample_blocks): + blocks = _get_blocks_by_condition(session, columns=["number", "timestamp"], order_by=Blocks.timestamp.asc()) + assert len(blocks) == 3 + assert hasattr(blocks[0], "number") + assert hasattr(blocks[0], "timestamp") + with pytest.raises(AttributeError): + _ = blocks[0].hash + assert blocks[0].number == 21436147 + assert blocks[1].number == 21436148 + assert blocks[2].number == 21436149 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/contract_test.py b/tests/hemera/app/api/routes/utils/contract_test.py new file mode 100644 index 000000000..6d67568c9 --- /dev/null +++ b/tests/hemera/app/api/routes/utils/contract_test.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/25 14:24 +# @Author ideal93 +# @File contract_test.py +# @Brief + +import pytest + +from hemera.app.api.routes.helper.contract import _get_contract_by_address, _get_contracts_by_addresses +from hemera.common.models.trace.contracts import Contracts +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture +def sample_contracts(session): + contracts = [ + Contracts( + address=hex_str_to_bytes("0x7a250d5630b4cf539739df2c5dacb4c659f2488d"), + name="Uniswap V2 Router", + contract_creator=hex_str_to_bytes("0x5b6c7b13a2b82ed76f48230be0c4a13f94160c5e"), + deployed_code=b"sample bytecode 1", + block_number=12345678, + is_verified=True, + ), + Contracts( + address=hex_str_to_bytes("0x1f9840a85d5af5bf1d1762f925bdaddc4201f984"), + 
name="Uniswap Token", + contract_creator=hex_str_to_bytes("0x4d812c19d95e76fd0194ce3c0ba2d9c04584c3e8"), + deployed_code=b"sample bytecode 2", + block_number=12345679, + is_verified=True, + ), + Contracts( + address=hex_str_to_bytes("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"), + contract_creator=hex_str_to_bytes("0x8ba1f109551bd432803012645ac136ddd64dba72"), + deployed_code=b"sample bytecode 3", + block_number=12345680, + is_verified=False, + ), + ] + + for contract in contracts: + session.add(contract) + session.commit() + + return contracts + + +def test_get_contract_by_address(session, sample_contracts): + # Test getting existing contract with name + contract = _get_contract_by_address(session, "0x7a250d5630b4cf539739df2c5dacb4c659f2488d") + assert contract is not None + assert contract.address == hex_str_to_bytes("0x7a250d5630b4cf539739df2c5dacb4c659f2488d") + assert contract.name == "Uniswap V2 Router" + assert contract.contract_creator == hex_str_to_bytes("0x5b6c7b13a2b82ed76f48230be0c4a13f94160c5e") + + # Test getting existing contract without name + contract = _get_contract_by_address(session, "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2") + assert contract is not None + assert contract.name is None + assert contract.contract_creator == hex_str_to_bytes("0x8ba1f109551bd432803012645ac136ddd64dba72") + + # Test getting non-existent contract + contract = _get_contract_by_address(session, "0x0000000000000000000000000000000000000000") + assert contract is None + + # Test getting specific columns + contract = _get_contract_by_address( + session, "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", columns=["address", "name", "contract_creator"] + ) + assert contract.address == hex_str_to_bytes("0x7a250d5630b4cf539739df2c5dacb4c659f2488d") + assert contract.name == "Uniswap V2 Router" + assert contract.contract_creator == hex_str_to_bytes("0x5b6c7b13a2b82ed76f48230be0c4a13f94160c5e") + with pytest.raises(AttributeError): + _ = contract.deployed_code + + # Test invalid 
address format + with pytest.raises(ValueError): + _get_contract_by_address(session, "invalid_address") + + +def test_get_contracts_by_addresses(session, sample_contracts): + # Test getting multiple contracts + addresses = [ + "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "0x1f9840a85d5af5bf1d1762f925bdaddc4201f984", + ] + contracts = _get_contracts_by_addresses(session, addresses) + assert len(contracts) == 2 + assert contracts[0].address == hex_str_to_bytes(addresses[0]) + assert contracts[0].name == "Uniswap V2 Router" + assert contracts[1].address == hex_str_to_bytes(addresses[1]) + assert contracts[1].name == "Uniswap Token" + + # Test with mix of named and unnamed contracts + addresses = [ + "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2", + ] + contracts = _get_contracts_by_addresses(session, addresses) + assert len(contracts) == 2 + assert contracts[0].name == "Uniswap V2 Router" + assert contracts[1].name is None + + # Test with duplicate addresses + addresses = [ + "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + ] + contracts = _get_contracts_by_addresses(session, addresses) + assert len(contracts) == 1 + assert contracts[0].address == hex_str_to_bytes(addresses[0]) + + # Test with specific columns + contracts = _get_contracts_by_addresses( + session, ["0x7a250d5630b4cf539739df2c5dacb4c659f2488d"], columns=["address", "name", "contract_creator"] + ) + assert len(contracts) == 1 + assert contracts[0].name == "Uniswap V2 Router" + assert contracts[0].contract_creator == hex_str_to_bytes("0x5b6c7b13a2b82ed76f48230be0c4a13f94160c5e") + with pytest.raises(AttributeError): + _ = contracts[0].deployed_code + + # Test with invalid address format + with pytest.raises(ValueError): + _get_contracts_by_addresses(session, ["invalid_address"]) + + # Test with non-existent addresses + addresses = [ + "0x7a250d5630b4cf539739df2c5dacb4c659f2488d", + 
"0x0000000000000000000000000000000000000000", + ] + contracts = _get_contracts_by_addresses(session, addresses) + assert len(contracts) == 1 + assert contracts[0].address == hex_str_to_bytes(addresses[0]) + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/internal_transaction_test.py b/tests/hemera/app/api/routes/utils/internal_transaction_test.py new file mode 100644 index 000000000..fc8ca8fc6 --- /dev/null +++ b/tests/hemera/app/api/routes/utils/internal_transaction_test.py @@ -0,0 +1,426 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/26 15:14 +# @Author ideal93 +# @File internal_transaction_test.py +# @Brief + +from datetime import datetime, timezone +from decimal import Decimal + +import pytest +from sqlmodel import Session + +from hemera.app.api.routes.helper.internal_transaction import ( + _get_internal_transactions_by_address_native, + get_internal_transactions_by_address, + get_internal_transactions_by_address_using_address_index, + get_internal_transactions_count_by_address, + get_internal_transactions_count_by_address_native, + get_internal_transactions_count_by_address_using_address_index, +) +from hemera.common.enumeration.txn_type import InternalTransactionType +from hemera.common.models.address.address_internal_transaciton import AddressInternalTransactions +from hemera.common.models.trace.traces import ContractInternalTransactions +from hemera.common.utils.format_utils import hex_str_to_bytes + +# Test data constants +TEST_ADDRESS = "0x742d35Cc6634C0532925a3b844Bc454e4438f44e".lower() +TEST_ADDRESS_2 = "0x742d35Cc6634C0532925a3b844Bc454e4438f44f".lower() +TEST_TRANSACTION_HASH = "0x123456789abcdef123456789abcdef123456789abcdef123456789abcdef1234".lower() +TEST_BLOCK_HASH = "0xabcdef123456789abcdef123456789abcdef123456789abcdef123456789abc1".lower() + + +@pytest.fixture(name="sample_contract_txns") +def sample_contract_txns_fixture(session: Session): + """Create sample 
internal transactions in the database""" + transactions = [] + # Create multiple transactions with different properties + for i in range(5): + tx = ContractInternalTransactions( + trace_id=f"trace_{i}", + from_address=hex_str_to_bytes(TEST_ADDRESS if i % 2 == 0 else TEST_ADDRESS_2), + to_address=hex_str_to_bytes(TEST_ADDRESS_2 if i % 2 == 0 else TEST_ADDRESS), + value=Decimal(str(i + 1)) * Decimal("1000000000000000000"), + gas=Decimal("21000"), + gas_used=Decimal("21000"), + trace_type="call", + call_type="call", + status=1, + block_number=1000 + i, + block_hash=hex_str_to_bytes(TEST_BLOCK_HASH), + block_timestamp=datetime(2024, 1, 1, tzinfo=timezone.utc), + transaction_hash=hex_str_to_bytes(TEST_TRANSACTION_HASH), + transaction_index=0, + ) + transactions.append(tx) + session.add(tx) + + session.commit() + return transactions + + +@pytest.fixture(name="sample_address_txns") +def sample_address_txns_fixture(session: Session): + """Create sample address internal transactions in the database""" + transactions = [] + # Create multiple transactions with different types + for i in range(5): + tx = AddressInternalTransactions( + trace_id=f"trace_{i}", + address=hex_str_to_bytes(TEST_ADDRESS), + related_address=hex_str_to_bytes(TEST_ADDRESS_2), + value=Decimal(str(i + 1)) * Decimal("1000000000000000000"), + gas=Decimal("21000"), + gas_used=Decimal("21000"), + trace_type="call", + call_type="call", + txn_type=InternalTransactionType.SENDER.value if i % 2 == 0 else InternalTransactionType.RECEIVER.value, + status=1, + block_number=1000 + i, + block_hash=hex_str_to_bytes(TEST_BLOCK_HASH), + block_timestamp=datetime(2024, 1, 1, tzinfo=timezone.utc), + transaction_hash=hex_str_to_bytes(TEST_TRANSACTION_HASH), + transaction_index=i, + ) + transactions.append(tx) + session.add(tx) + + session.commit() + return transactions + + +def test_get_internal_transactions_both_directions(session: Session, sample_contract_txns): + """Test getting transactions for both directions without 
using index""" + result = get_internal_transactions_by_address(session=session, address=TEST_ADDRESS, direction="both", limit=10) + + assert len(result) == 5 + # Verify first transaction details + tx = result[0] + assert tx.trace_id == "trace_4" + assert tx.value == Decimal("5000000000000000000") + assert tx.from_address == TEST_ADDRESS + assert tx.to_address == TEST_ADDRESS_2 + + +def test_get_internal_transactions_from_direction(session: Session, sample_contract_txns): + """Test getting only 'from' transactions without using index""" + result = get_internal_transactions_by_address(session=session, address=TEST_ADDRESS, direction="from", limit=10) + + assert len(result) == 3 + for tx in result: + assert tx.from_address == TEST_ADDRESS + + +def test_get_internal_transactions_to_direction(session: Session, sample_contract_txns): + """Test getting only 'to' transactions without using index""" + result = get_internal_transactions_by_address(session=session, address=TEST_ADDRESS, direction="to", limit=10) + + assert len(result) == 2 + for tx in result: + assert tx.to_address == TEST_ADDRESS + + +def test_get_internal_transactions_with_limit_offset(session: Session, sample_contract_txns): + """Test pagination functionality""" + result = get_internal_transactions_by_address(session=session, address=TEST_ADDRESS, limit=2, offset=1) + + assert len(result) == 2 + + +def test_get_internal_transactions_using_index_both_directions(session: Session, sample_address_txns): + """Test getting transactions using address index for both directions""" + result = get_internal_transactions_by_address( + session=session, address=TEST_ADDRESS, direction="both", limit=10, use_address_index=True + ) + + assert len(result) == 5 + # Verify conversion of sender/receiver types + first_tx = result[0] + assert first_tx.trace_id == "trace_4" + assert first_tx.from_address == TEST_ADDRESS + assert first_tx.to_address == TEST_ADDRESS_2 + + +def 
test_get_internal_transactions_using_index_from_direction(session: Session, sample_address_txns): + """Test getting only 'from' transactions using address index""" + result = get_internal_transactions_by_address( + session=session, address=TEST_ADDRESS, direction="from", limit=10, use_address_index=True + ) + + assert len(result) == 3 # Includes SENDER and SELF_CALL types + for tx in result: + assert tx.from_address == TEST_ADDRESS + + +def test_get_internal_transactions_using_index_to_direction(session: Session, sample_address_txns): + """Test getting only 'to' transactions using address index""" + result = get_internal_transactions_by_address( + session=session, address=TEST_ADDRESS, direction="to", limit=10, use_address_index=True + ) + + assert len(result) == 2 # Includes RECEIVER types + for tx in result: + assert tx.to_address == TEST_ADDRESS + + +def test_get_internal_transactions_specific_columns(session: Session, sample_contract_txns): + """Test retrieving specific columns only""" + result = get_internal_transactions_by_address( + session=session, + address=TEST_ADDRESS, + ) + + assert len(result) == 5 + tx = result[0] + assert tx.block_number is not None + assert tx.transaction_hash is not None + assert tx.value is not None + + +def test_invalid_address_format(session: Session): + """Test error handling for invalid address format""" + with pytest.raises(ValueError): + get_internal_transactions_by_address(session=session, address="invalid_address") + + +# Tests for get_internal_transactions_by_address +def test_get_internal_transactions_by_address_both(session: Session, sample_contract_txns): + """Test getting transactions from both directions using contract table""" + result = _get_internal_transactions_by_address_native(session=session, address=TEST_ADDRESS, direction="both") + + assert len(result) == 5 + # Check if transactions are ordered by block number desc + assert all(result[i].block_number >= result[i + 1].block_number for i in range(len(result) - 
1)) + + # Verify some are from and some are to transactions + from_count = sum(1 for tx in result if tx.from_address == hex_str_to_bytes(TEST_ADDRESS)) + to_count = sum(1 for tx in result if tx.to_address == hex_str_to_bytes(TEST_ADDRESS)) + assert from_count > 0 and to_count > 0 + assert from_count + to_count == 5 + + +def test_get_internal_transactions_by_address_from(session: Session, sample_contract_txns): + """Test getting only from transactions using contract table""" + result = _get_internal_transactions_by_address_native(session=session, address=TEST_ADDRESS, direction="from") + + assert len(result) == 3 + assert all(tx.from_address == hex_str_to_bytes(TEST_ADDRESS) for tx in result) + + +def test_get_internal_transactions_by_address_to(session: Session, sample_contract_txns): + """Test getting only to transactions using contract table""" + result = _get_internal_transactions_by_address_native(session=session, address=TEST_ADDRESS, direction="to") + + assert len(result) == 2 + assert all(tx.to_address == hex_str_to_bytes(TEST_ADDRESS) for tx in result) + + +def test_get_internal_transactions_by_address_columns(session: Session, sample_contract_txns): + """Test getting specific columns from contract table""" + result = _get_internal_transactions_by_address_native( + session=session, address=TEST_ADDRESS, columns=["block_number", "transaction_hash"] + ) + + assert len(result) == 5 + # Verify only requested columns are available + tx = result[0] + assert hasattr(tx, "block_number") + assert hasattr(tx, "transaction_hash") + with pytest.raises(AttributeError): + _ = tx.value + + +def test_get_internal_transactions_by_address_pagination(session: Session, sample_contract_txns): + """Test pagination in contract table query""" + result = _get_internal_transactions_by_address_native(session=session, address=TEST_ADDRESS, limit=2, offset=1) + + assert len(result) == 2 + assert result[0].block_number == 1003 # Based on test data setup + + +# Tests for 
get_internal_transactions_by_address_using_address_index +def test_get_internal_transactions_by_address_using_index_both(session: Session, sample_address_txns): + """Test getting transactions from both directions using address index""" + result = get_internal_transactions_by_address_using_address_index( + session=session, address=TEST_ADDRESS, direction="both" + ) + + assert len(result) == 5 + assert all(tx.address == hex_str_to_bytes(TEST_ADDRESS) for tx in result) + # Check ordering + assert all(result[i].block_number >= result[i + 1].block_number for i in range(len(result) - 1)) + + +def test_get_internal_transactions_by_address_using_index_from(session: Session, sample_address_txns): + """Test getting only from transactions using address index""" + result = get_internal_transactions_by_address_using_address_index( + session=session, address=TEST_ADDRESS, direction="from" + ) + + assert len(result) == 3 # Including SENDER and SELF_CALL types + assert all( + tx.txn_type in [InternalTransactionType.SENDER.value, InternalTransactionType.SELF_CALL.value] for tx in result + ) + + +def test_get_internal_transactions_by_address_using_index_to(session: Session, sample_address_txns): + """Test getting only to transactions using address index""" + result = get_internal_transactions_by_address_using_address_index( + session=session, address=TEST_ADDRESS, direction="to" + ) + + assert len(result) == 2 # Including RECEIVER type + assert all( + tx.txn_type in [InternalTransactionType.RECEIVER.value, InternalTransactionType.SELF_CALL.value] + for tx in result + ) + + +def test_get_internal_transactions_by_address_using_index_columns(session: Session, sample_address_txns): + """Test getting specific columns using address index""" + result = get_internal_transactions_by_address_using_address_index( + session=session, address=TEST_ADDRESS, columns=["block_number", "transaction_hash", "txn_type"] + ) + + assert len(result) == 5 + # Verify only requested columns are available + tx 
= result[0] + assert hasattr(tx, "block_number") + assert hasattr(tx, "transaction_hash") + assert hasattr(tx, "txn_type") + with pytest.raises(AttributeError): + _ = tx.value + + +def test_get_internal_transactions_by_address_using_index_pagination(session: Session, sample_address_txns): + """Test pagination in address index query""" + result = get_internal_transactions_by_address_using_address_index( + session=session, address=TEST_ADDRESS, limit=2, offset=1 + ) + + assert len(result) == 2 + assert result[0].block_number == 1003 # Based on test data setup + + +# Tests for internal transaction count functions +def test_get_internal_transactions_count_without_index(session: Session, sample_contract_txns): + """Test getting internal transaction count without using index""" + # Test both directions + count = get_internal_transactions_count_by_address( + session=session, address=TEST_ADDRESS, direction="both", use_address_index=False + ) + assert count == 5 + + # Test from direction + count = get_internal_transactions_count_by_address( + session=session, address=TEST_ADDRESS, direction="from", use_address_index=False + ) + assert count == 3 + + # Test to direction + count = get_internal_transactions_count_by_address( + session=session, address=TEST_ADDRESS, direction="to", use_address_index=False + ) + assert count == 2 + + +def test_get_internal_transactions_count_with_index(session: Session, sample_address_txns): + """Test getting internal transaction count using address index""" + # Test both directions + count = get_internal_transactions_count_by_address( + session=session, address=TEST_ADDRESS, direction="both", use_address_index=True + ) + assert count == 5 + + # Test from direction (SENDER transactions) + count = get_internal_transactions_count_by_address( + session=session, address=TEST_ADDRESS, direction="from", use_address_index=True + ) + assert count == 3 # SENDER transactions (i % 2 == 0) + + # Test to direction (RECEIVER transactions) + count = 
get_internal_transactions_count_by_address( + session=session, address=TEST_ADDRESS, direction="to", use_address_index=True + ) + assert count == 2 # RECEIVER transactions (i % 2 != 0) + + +def test_get_internal_transactions_count_empty_db(session: Session, clean_db): + """Test count functions with empty database""" + # Test without index + count = get_internal_transactions_count_by_address( + session=session, + address=TEST_ADDRESS, + ) + assert count == 0 + + # Test with index + count = get_internal_transactions_count_by_address(session=session, address=TEST_ADDRESS, use_address_index=True) + assert count == 0 + + +def test_get_internal_transactions_by_address_count_direction_filters(session: Session, sample_contract_txns): + """Test direct count function with different direction filters""" + # Test default (both) direction + count = get_internal_transactions_count_by_address_native(session=session, address=TEST_ADDRESS) + assert count == 5 + + # Test from direction + count = get_internal_transactions_count_by_address_native(session=session, address=TEST_ADDRESS, direction="from") + assert count == 3 + + # Test to direction + count = get_internal_transactions_count_by_address_native(session=session, address=TEST_ADDRESS, direction="to") + assert count == 2 + + +def test_get_internal_transactions_by_address_using_index_count_types(session: Session, sample_address_txns): + """Test count function using index with different transaction types""" + # Test from direction (SENDER type) + count = get_internal_transactions_count_by_address_using_address_index( + session=session, address=TEST_ADDRESS, direction="from" + ) + # Should count SENDER transactions (where i % 2 == 0) + assert count == 3 + + # Test to direction (RECEIVER type) + count = get_internal_transactions_count_by_address_using_address_index( + session=session, address=TEST_ADDRESS, direction="to" + ) + # Should count RECEIVER transactions (where i % 2 != 0) + assert count == 2 + + +def 
test_get_internal_transactions_count_invalid_address(session: Session): + """Test count functions with invalid address format""" + # Test main count function + with pytest.raises(ValueError): + get_internal_transactions_count_by_address(session=session, address="invalid_address") + + # Test without index count function + with pytest.raises(ValueError): + get_internal_transactions_count_by_address_native(session=session, address="invalid_address") + + # Test with index count function + with pytest.raises(ValueError): + get_internal_transactions_count_by_address_using_address_index(session=session, address="invalid_address") + + +def test_get_internal_transactions_count_non_existent_address(session: Session, sample_contract_txns): + """Test count functions with valid but non-existent address""" + non_existent_address = "0x" + "1" * 40 + + # Test without index + count = get_internal_transactions_count_by_address(session=session, address=non_existent_address) + assert count == 0 + + # Test with index + count = get_internal_transactions_count_by_address( + session=session, address=non_existent_address, use_address_index=True + ) + assert count == 0 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/log_test.py b/tests/hemera/app/api/routes/utils/log_test.py new file mode 100644 index 000000000..a1dc625c6 --- /dev/null +++ b/tests/hemera/app/api/routes/utils/log_test.py @@ -0,0 +1,164 @@ +from datetime import datetime, timedelta + +import pytest + +from hemera.app.api.routes.helper.log import _get_logs_by_address, _get_logs_by_hash +from hemera.common.models.base.logs import Logs +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture +def sample_logs(session): + current_time = datetime.utcnow() + logs = [ + # Two logs for same transaction + Logs( + log_index=1, + transaction_hash=hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379"), + 
block_hash=hex_str_to_bytes("0x6511c8974f8341a2a4f30fcff201cee1863364e978e85f9f667912d4874a3bbc"), + address=hex_str_to_bytes("0x1111111111111111111111111111111111111111"), + data=hex_str_to_bytes("0xf7d8c88300000000000000000000000000000000000000000000000000000000"), + topic0=hex_str_to_bytes("0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b65"), + topic1=hex_str_to_bytes("0x000000000000000000000000b4c79dab8f259c7aee6e5b2aa729821864227e84"), + block_number=100, + block_timestamp=current_time, + transaction_index=0, + ), + Logs( + log_index=2, + transaction_hash=hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379"), + block_hash=hex_str_to_bytes("0x6511c8974f8341a2a4f30fcff201cee1863364e978e85f9f667912d4874a3bbc"), + address=hex_str_to_bytes("0x1111111111111111111111111111111111111111"), + data=hex_str_to_bytes("0xf7d8c88300000000000000000000000000000000000000000000000000000001"), + topic0=hex_str_to_bytes("0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b66"), + block_number=100, + block_timestamp=current_time, + transaction_index=0, + ), + # One log for second transaction + Logs( + log_index=1, + transaction_hash=hex_str_to_bytes("0x74c72e9e6f3aa88e896cc7d9d526bdf0934d3f9c8fe178d0ed46f21d9c466434"), + block_hash=hex_str_to_bytes("0xa945dc8e514fd116ecf87e6692dceb3692131694ab412ace1cfa01fd899581c5"), + address=hex_str_to_bytes("0x2222222222222222222222222222222222222222"), + data=hex_str_to_bytes("0xf7d8c88300000000000000000000000000000000000000000000000000000002"), + topic0=hex_str_to_bytes("0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b67"), + block_number=101, + block_timestamp=current_time - timedelta(seconds=12), + transaction_index=0, + ), + ] + + for log in logs: + session.add(log) + session.commit() + + return logs + + +def test_get_logs_by_hash(session, sample_logs): + # Test full select + hash_str = "0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379" + logs = 
_get_logs_by_hash(session, hash_str) + assert len(logs) == 2 + for log in logs: + assert log.transaction_hash == hex_str_to_bytes(hash_str) + assert log.topic0 is not None # Required field + assert log.address is not None # Required field + assert isinstance(log.block_hash, bytes) + assert isinstance(log.topic0, bytes) + + # Test specific columns + logs = _get_logs_by_hash(session, hash_str, columns=["log_index", "topic0", "address"]) + assert len(logs) == 2 + log_indices = {log.log_index for log in logs} + assert log_indices == {1, 2} + for log in logs: + assert hasattr(log, "log_index") + assert hasattr(log, "topic0") + assert hasattr(log, "address") + assert isinstance(log.topic0, bytes) + assert isinstance(log.address, bytes) + with pytest.raises(AttributeError): + _ = log.data # Should not have data field + + # Test non-existent hash + logs = _get_logs_by_hash(session, "0xffffffffffffffff") + assert len(logs) == 0 + + # Test invalid hash format + with pytest.raises(ValueError): + _get_logs_by_hash(session, "invalid_hash") + + +def test_get_logs_by_address(session, sample_logs): + address = "0x1111111111111111111111111111111111111111" + + # Test full select + logs = _get_logs_by_address(session, address) + assert len(logs) == 2 + for log in logs: + assert log.address == hex_str_to_bytes(address) + assert isinstance(log.topic0, bytes) + assert isinstance(log.block_hash, bytes) + + # Test pagination + logs = _get_logs_by_address(session, address, limit=1) + assert len(logs) == 1 + + logs = _get_logs_by_address(session, address, offset=1, limit=1) + assert len(logs) == 1 + assert logs[0].log_index == 1 + + # Test specific columns + logs = _get_logs_by_address(session, address, columns=["log_index", "topic0", "address"]) + assert len(logs) == 2 + for log in logs: + assert hasattr(log, "log_index") + assert hasattr(log, "topic0") + assert hasattr(log, "address") + assert isinstance(log.topic0, bytes) + assert isinstance(log.address, bytes) + assert log.address == 
hex_str_to_bytes(address) + with pytest.raises(AttributeError): + _ = log.data # Should not have data field + + # Test non-existent address + logs = _get_logs_by_address(session, "0x9999999999999999999999999999999999999999") + assert len(logs) == 0 + + # Test invalid address format + with pytest.raises(ValueError): + _get_logs_by_address(session, "invalid_address") + + +def test_logs_transaction_isolation(session, sample_logs): + address = "0x1111111111111111111111111111111111111111" + + with session.begin(): + new_log = Logs( + log_index=3, + transaction_hash=hex_str_to_bytes("0x644aee68ccb38d2a74901f3e1279419fd62481b5567a56bcb479c38d4fd5b379"), + block_hash=hex_str_to_bytes("0x6511c8974f8341a2a4f30fcff201cee1863364e978e85f9f667912d4874a3bbc"), + address=hex_str_to_bytes(address), + data=hex_str_to_bytes("0xf7d8c88300000000000000000000000000000000000000000000000000000003"), + topic0=hex_str_to_bytes("0x7fcf532c15f0a6db0bd6d0e038bea71d30d808c7d98cb3bf7268a95bf5081b68"), + block_number=100, + block_timestamp=sample_logs[0].block_timestamp, + transaction_index=0, + ) + session.add(new_log) + + # Should see the new log inside transaction + logs = _get_logs_by_address(session, address) + assert len(logs) == 3 + + session.rollback() + + # Should not see the new log after rollback + logs = _get_logs_by_address(session, address) + assert len(logs) == 2 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/price_test.py b/tests/hemera/app/api/routes/utils/price_test.py new file mode 100644 index 000000000..aa44ebd9f --- /dev/null +++ b/tests/hemera/app/api/routes/utils/price_test.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/20 14:02 +# @Author ideal93 +# @File token_test.py.py +# @Brief + +from datetime import datetime, timedelta +from decimal import Decimal + +import pytest +from sqlmodel import delete + +from hemera.app.api.routes.helper.token import get_coin_prices, 
get_latest_coin_price, get_token_price +from hemera.common.models.utils.prices import CoinPrices, TokenHourlyPrices, TokenPrices + + +@pytest.fixture +def sample_token_prices(session): + """Create sample token price data for testing + + Creates both current prices (TokenPrices) and historical hourly prices + (TokenHourlyPrices) for multiple tokens. + + Args: + session: SQLModel database session + + Returns: + dict: Dictionary containing current and hourly price test data + """ + # Create current token prices + current_prices = [ + TokenPrices(symbol="ETH", price=Decimal("2000.50"), timestamp=datetime.utcnow()), + TokenPrices(symbol="BTC", price=Decimal("45000.75"), timestamp=datetime.utcnow()), + TokenPrices(symbol="USDT", price=Decimal("1.0"), timestamp=datetime.utcnow()), + ] + + # Create historical hourly prices + now = datetime.utcnow() + hourly_prices = [ + TokenHourlyPrices(symbol="ETH", price=Decimal("1950.25"), timestamp=now - timedelta(hours=1)), + TokenHourlyPrices(symbol="ETH", price=Decimal("1900.75"), timestamp=now - timedelta(hours=2)), + TokenHourlyPrices(symbol="BTC", price=Decimal("44500.50"), timestamp=now - timedelta(hours=1)), + ] + + # Add all prices to database + for price in current_prices + hourly_prices: + session.add(price) + session.commit() + + return {"current": current_prices, "hourly": hourly_prices} + + +@pytest.fixture +def sample_coin_prices(session): + """Create sample coin price data for testing + + Creates daily coin prices for the last 3 days. 
+ + Args: + session: SQLModel database session + + Returns: + dict: Dictionary containing price data and corresponding dates + """ + now = datetime.utcnow() + dates = [datetime.combine(now.date() - timedelta(days=i), datetime.min.time()) for i in range(3)] + + coin_prices = [ + CoinPrices(symbol="TEST", price=Decimal("100.50"), block_date=dates[0]), + CoinPrices(symbol="TEST", price=Decimal("98.75"), block_date=dates[1]), + CoinPrices(symbol="TEST", price=Decimal("97.25"), block_date=dates[2]), + ] + + for price in coin_prices: + session.add(price) + session.commit() + + return {"prices": coin_prices, "dates": dates} + + +def test_get_token_price_latest(session, sample_token_prices): + """Test getting latest token prices + + Tests: + 1. Getting latest price for existing tokens + 2. Getting price for non-existent token + """ + # Test getting latest price for existing tokens + eth_price = get_token_price(session, "ETH") + assert eth_price == Decimal("2000.50") + + btc_price = get_token_price(session, "BTC") + assert btc_price == Decimal("45000.75") + + # Test getting price for non-existent token + unknown_price = get_token_price(session, "UNKNOWN") + assert unknown_price == Decimal("0.0") + + +def test_get_token_price_historical(session, sample_token_prices): + """Test getting historical token prices + + Tests: + 1. Getting price at specific historical timestamps + 2. 
Getting price at non-existent historical timestamp + """ + now = datetime.utcnow() + + # Test getting price from 1 hour ago + eth_price = get_token_price(session, "ETH", date=now - timedelta(hours=1)) + assert eth_price == Decimal("1950.25") + + # Test getting price from 2 hours ago + eth_price = get_token_price(session, "ETH", date=now - timedelta(hours=2)) + assert eth_price == Decimal("1900.75") + + # Test getting price from non-existent timestamp + eth_price = get_token_price(session, "ETH", date=now - timedelta(days=7)) + assert eth_price == Decimal("0.0") + + +def test_get_coin_prices(session, sample_coin_prices): + """Test getting coin prices for specific dates + + Tests: + 1. Getting prices for existing dates + 2. Getting prices for non-existent dates + """ + # Test getting prices for existing dates + dates = sample_coin_prices["dates"][:2] + prices = get_coin_prices(session, dates) + + assert len(prices) == 2 + + assert prices[1].block_date == dates[1] + assert prices[1].price == Decimal("98.75") + + assert prices[0].block_date == dates[0] + assert prices[0].price == Decimal("100.50") + + # Test getting prices for future date + future_date = (datetime.utcnow() + timedelta(days=7)).date() + empty_prices = get_coin_prices(session, [future_date]) + assert len(empty_prices) == 0 + + +def test_get_latest_coin_price(session, sample_coin_prices): + """Test getting latest coin price + + Tests: + 1. Getting latest price when prices exist + 2. 
Getting latest price when no prices exist + """ + # Test getting latest price + latest_price = get_latest_coin_price(session) + assert latest_price == 100.50 + + # Test getting latest price with empty database + session.exec(delete(CoinPrices)) + session.commit() + + empty_price = get_latest_coin_price(session) + assert empty_price == 0.0 + + +def test_transaction_isolation_token_prices(session, sample_token_prices): + """Test transaction isolation for token prices + + Tests that price changes within a transaction are properly isolated + and can be rolled back. + """ + # Test price changes within transaction + with session.begin(): + new_price = TokenPrices(symbol="ETH", price=Decimal("2100.00"), timestamp=datetime.utcnow()) + session.add(new_price) + + # Price should be updated within transaction + current_price = get_token_price(session, "ETH") + assert current_price == Decimal("2100.00") + + session.rollback() + + # Price should be back to original after rollback + final_price = get_token_price(session, "ETH") + assert final_price == Decimal("2000.50") + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/token_transfers_test.py b/tests/hemera/app/api/routes/utils/token_transfers_test.py new file mode 100644 index 000000000..6e620fc33 --- /dev/null +++ b/tests/hemera/app/api/routes/utils/token_transfers_test.py @@ -0,0 +1,687 @@ +from datetime import datetime, timedelta +from decimal import Decimal + +import pytest + +from hemera.app.api.routes.helper.token_transfers import ( + TokenTransferAbbr, + _get_erc20_token_transfers_by_hash, + _get_erc20_transfers_by_address_index, + _get_erc20_transfers_by_address_native, + _get_erc721_token_transfers_by_hash, + _get_erc721_transfers_by_address_index, + _get_erc721_transfers_by_address_native, + _get_erc1155_token_transfers_by_hash, + _get_erc1155_transfers_by_address_index, + _get_erc1155_transfers_by_address_native, + _get_nft_transfers_by_hash, + 
get_nft_transfers_by_address_native, + get_token_transfers_by_address, + get_token_transfers_by_hash, +) +from hemera.common.enumeration.token_type import TokenType +from hemera.common.enumeration.txn_type import AddressNftTransferType, AddressTokenTransferType +from hemera.common.models.address.address_nft_transfers import AddressNftTransfers +from hemera.common.models.address.address_token_transfers import AddressTokenTransfers +from hemera.common.models.token_transfers import ( + ERC20TokenTransfers, + ERC721TokenTransfers, + ERC1155TokenTransfers, + NftTransfers, +) +from hemera.common.utils.format_utils import bytes_to_hex_str, hex_str_to_bytes + + +@pytest.fixture +def sample_addresses(): + return { + "sender": "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "receiver": "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + "token": "0xcccccccccccccccccccccccccccccccccccccccc", + } + + +@pytest.fixture +def erc20_token_transfers(session, sample_addresses): + """Create sample ERC20 token transfers""" + now = datetime.utcnow() + transfers = [ + ERC20TokenTransfers( + transaction_hash=hex_str_to_bytes("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + value=Decimal("1000000000000000000"), # 1 token + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1"), + ), + ERC20TokenTransfers( + transaction_hash=hex_str_to_bytes("0x2234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + value=Decimal("2000000000000000000"), # 2 tokens 
+ log_index=1, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb1"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def erc721_token_transfers(session, sample_addresses): + """Create sample ERC721 token transfers""" + now = datetime.utcnow() + transfers = [ + ERC721TokenTransfers( + transaction_hash=hex_str_to_bytes("0x3234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("1"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb2"), + ), + ERC721TokenTransfers( + transaction_hash=hex_str_to_bytes("0x4234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=2), + from_address=hex_str_to_bytes(sample_addresses["receiver"]), + to_address=hex_str_to_bytes(sample_addresses["sender"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("2"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb3"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def erc1155_token_transfers(session, sample_addresses): + """Create sample ERC1155 token transfers""" + now = datetime.utcnow() + transfers = [ + ERC1155TokenTransfers( + transaction_hash=hex_str_to_bytes("0x5234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + 
token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("1"), + value=Decimal("5"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb4"), + ), + ERC1155TokenTransfers( + transaction_hash=hex_str_to_bytes("0x6234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("2"), + value=Decimal("10"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb5"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def nft_transfers(session, sample_addresses): + """Create sample NFT transfers in unified table""" + now = datetime.utcnow() + transfers = [ + NftTransfers( + transaction_hash=hex_str_to_bytes("0x7234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("1"), + value=None, # ERC721 + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb6"), + ), + NftTransfers( + transaction_hash=hex_str_to_bytes("0x8234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal("2"), + value=Decimal("5"), # ERC1155 
+ log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb7"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def address_token_transfers(session, sample_addresses): + """Create sample address token transfers""" + now = datetime.utcnow() + transfers = [ + AddressTokenTransfers( + address=hex_str_to_bytes(sample_addresses["sender"]), + block_number=21436149, + block_timestamp=now, + transaction_hash=hex_str_to_bytes("0x9234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["receiver"]), + transfer_type=AddressTokenTransferType.SENDER.value, + value=Decimal("1000000000000000000"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb8"), + ), + AddressTokenTransfers( + address=hex_str_to_bytes(sample_addresses["receiver"]), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + transaction_hash=hex_str_to_bytes("0xa234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["sender"]), + transfer_type=AddressTokenTransferType.RECEIVER.value, + value=Decimal("500000000000000000"), + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb9"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +@pytest.fixture +def address_nft_transfers(session, sample_addresses): + """Create sample address NFT transfers""" + now = datetime.utcnow() + transfers = [ + AddressNftTransfers( + address=hex_str_to_bytes(sample_addresses["sender"]), + block_number=21436149, + block_timestamp=now, + 
transaction_hash=hex_str_to_bytes("0xb234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["receiver"]), + transfer_type=AddressNftTransferType.SENDER.value, + token_id=Decimal("1"), + value=None, # ERC721 + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb10"), + ), + AddressNftTransfers( + address=hex_str_to_bytes(sample_addresses["receiver"]), + block_number=21436148, + block_timestamp=now - timedelta(minutes=1), + transaction_hash=hex_str_to_bytes("0xc234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + token_address=hex_str_to_bytes(sample_addresses["token"]), + related_address=hex_str_to_bytes(sample_addresses["sender"]), + transfer_type=AddressNftTransferType.RECEIVER.value, + token_id=Decimal("2"), + value=Decimal("5"), # ERC1155 + log_index=0, + block_hash=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb11"), + ), + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + return transfers + + +def test_get_erc20_token_transfer_by_hash(session, erc20_token_transfers): + """Test querying ERC20 transfer by hash""" + # Test existing transfer + hash_str = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = _get_erc20_token_transfers_by_hash(session, hash_str) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.block_number == 21436149 + assert transfer.log_index == 0 + assert transfer.value == Decimal("1000000000000000000") + + # Test non-existent hash + transfers = _get_erc20_token_transfers_by_hash(session, "0xffff") + assert len(transfers) == 0 + + # Test invalid hash + with pytest.raises(ValueError): + _get_erc20_token_transfers_by_hash(session, "invalid_hash") + + +def test_get_erc721_token_transfer_by_hash(session, erc721_token_transfers): + """Test 
querying ERC721 transfer by hash""" + # Test existing transfer + hash_str = "0x3234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = _get_erc721_token_transfers_by_hash(session, hash_str) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.block_number == 21436149 + assert transfer.log_index == 0 + assert transfer.token_id == Decimal("1") + + # Test non-existent hash + transfers = _get_erc721_token_transfers_by_hash(session, "0xffff") + assert len(transfers) == 0 + + # Test invalid hash + with pytest.raises(ValueError): + _get_erc721_token_transfers_by_hash(session, "invalid_hash") + + +def test_get_erc1155_token_transfer_by_hash(session, erc1155_token_transfers): + """Test querying ERC1155 transfer by hash""" + # Test existing transfer + hash_str = "0x5234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = _get_erc1155_token_transfers_by_hash(session, hash_str) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.block_number == 21436149 + assert transfer.log_index == 0 + assert transfer.token_id == Decimal("1") + assert transfer.value == Decimal("5") + + # Test non-existent hash + transfers = _get_erc1155_token_transfers_by_hash(session, "0xffff") + assert len(transfers) == 0 + + # Test invalid hash + with pytest.raises(ValueError): + _get_erc1155_token_transfers_by_hash(session, "invalid_hash") + + +def test_get_nft_transfer_by_hash(session, nft_transfers): + """Test querying NFT transfer by hash from unified table""" + # Test existing ERC721 transfer + hash_str = "0x7234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = _get_nft_transfers_by_hash(session, hash_str, token_type="erc721") + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.block_number == 21436149 + assert transfer.value is None + assert transfer.token_id == Decimal("1") + + # Test existing ERC1155 transfer + hash_str = 
"0x8234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = _get_nft_transfers_by_hash(session, hash_str, token_type="erc1155") + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.value == Decimal("5") + assert transfer.token_id == Decimal("2") + + # Test non-existent hash + transfers = _get_nft_transfers_by_hash(session, "0xffff") + assert len(transfers) == 0 + + # Test invalid hash + with pytest.raises(ValueError): + _get_nft_transfers_by_hash(session, "invalid_hash") + + +def test_multiple_transfers_same_hash(session, sample_addresses): + """Test handling of multiple transfers in the same transaction""" + now = datetime.utcnow() + tx_hash = hex_str_to_bytes("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee") + block_hash = hex_str_to_bytes("0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff") + + # Create multiple ERC1155 transfers with same hash but different token_ids + transfers = [ + ERC1155TokenTransfers( + transaction_hash=tx_hash, + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes(sample_addresses["sender"]), + to_address=hex_str_to_bytes(sample_addresses["receiver"]), + token_address=hex_str_to_bytes(sample_addresses["token"]), + token_id=Decimal(str(i)), + value=Decimal("1"), + log_index=i, + block_hash=block_hash, + ) + for i in range(3) + ] + + for transfer in transfers: + session.add(transfer) + session.commit() + + # Query by hash should return all transfers + results = _get_erc1155_token_transfers_by_hash(session, bytes_to_hex_str(tx_hash)) + assert len(results) == 3 + assert sorted([t.token_id for t in results]) == [Decimal("0"), Decimal("1"), Decimal("2")] + + # Query by address should return all transfers + results = _get_erc1155_transfers_by_address_native(session, sample_addresses["sender"], direction="from") + assert len([t for t in results if t.transaction_hash == tx_hash]) == 3 + + +def test_response_model_conversion(session, 
erc20_token_transfers, address_token_transfers): + """Test conversion of different models to TokenTransfer response model""" + # Test ERC20 native table conversion + transfers = _get_erc20_token_transfers_by_hash( + session, "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + ) + assert len(transfers) > 0 + response = TokenTransferAbbr.from_db_model(transfers[0]) + assert isinstance(response, TokenTransferAbbr) + assert response.token_type == TokenType.ERC20.value + assert response.value is not None + assert response.token_id is None + + # Test address index table conversion + addr_transfers = _get_erc20_transfers_by_address_index( + session, bytes_to_hex_str(address_token_transfers[0].address) + ) + assert len(addr_transfers) > 0 + response = TokenTransferAbbr.from_db_model(addr_transfers[0]) + assert isinstance(response, TokenTransferAbbr) + assert response.token_type == TokenType.ERC20.value + assert response.from_address is not None + assert response.to_address is not None + + +def test_get_token_transfer_by_hash_using_unified_table(session, nft_transfers, erc20_token_transfers): + """Test querying token transfers using unified table""" + # Test ERC20 transfer included when token_type is "all" + hash_str = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = get_token_transfers_by_hash(session, hash_str, token_type="ALL", use_unified_table=True) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.token_type == TokenType.ERC20.value + assert transfer.value == 1000000000000000000 + + # Test NFT transfer filtered by type + hash_str = "0x7234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = get_token_transfers_by_hash(session, hash_str, token_type="ERC721", use_unified_table=True) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.token_id == "1" + assert transfer.value is None + + +def test_get_token_transfer_by_hash_using_separate_tables( + 
session, erc20_token_transfers, erc721_token_transfers, erc1155_token_transfers +): + """Test querying token transfers using separate tables""" + # Test ERC20 transfer + hash_str = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = get_token_transfers_by_hash(session, hash_str, token_type="ERC20", use_unified_table=False) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.token_type == TokenType.ERC20.value + assert transfer.value == 1000000000000000000 + + # Test ERC721 transfer + hash_str = "0x3234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = get_token_transfers_by_hash(session, hash_str, token_type="ERC721", use_unified_table=False) + assert len(transfers) == 1 + transfer = transfers[0] + assert transfer.token_type == TokenType.ERC721.value + assert transfer.token_id == "1" + + # Test all types + hash_str = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + transfers = get_token_transfers_by_hash(session, hash_str, token_type="ALL", use_unified_table=False) + assert len(transfers) == 1 # Should only find the ERC20 transfer for this hash + assert transfers[0].token_type == TokenType.ERC20.value + + +def test_get_erc20_transfers_by_address_native(session, erc20_token_transfers, sample_addresses): + """Test querying ERC20 transfers by address using native table""" + # Test from direction + transfers = _get_erc20_transfers_by_address_native(session, sample_addresses["sender"], direction="from") + assert len(transfers) == 2 + assert all(tx.from_address == hex_str_to_bytes(sample_addresses["sender"]) for tx in transfers) + + # Test to direction + transfers = _get_erc20_transfers_by_address_native(session, sample_addresses["receiver"], direction="to") + assert len(transfers) == 2 + assert all(tx.to_address == hex_str_to_bytes(sample_addresses["receiver"]) for tx in transfers) + + # Test both direction + transfers = _get_erc20_transfers_by_address_native(session, 
sample_addresses["sender"], direction="both") + assert len(transfers) == 2 # All transfers where address is either sender or receiver + + # Test with token_address filter + transfers = _get_erc20_transfers_by_address_native( + session, sample_addresses["sender"], token_address=sample_addresses["token"] + ) + assert len(transfers) > 0 + assert all(tx.token_address == hex_str_to_bytes(sample_addresses["token"]) for tx in transfers) + + # Test with limit and offset + transfers = _get_erc20_transfers_by_address_native(session, sample_addresses["sender"], limit=1, offset=1) + assert len(transfers) == 1 + + +def test_get_erc721_transfers_by_address_native(session, erc721_token_transfers, sample_addresses): + """Test querying ERC721 transfers by address using native table""" + # Test from direction + transfers = _get_erc721_transfers_by_address_native(session, sample_addresses["sender"], direction="from") + assert len(transfers) == 1 + assert all(tx.from_address == hex_str_to_bytes(sample_addresses["sender"]) for tx in transfers) + assert all(tx.token_id is not None for tx in transfers) + + # Test to direction + transfers = _get_erc721_transfers_by_address_native(session, sample_addresses["receiver"], direction="to") + assert len(transfers) == 1 + assert all(tx.to_address == hex_str_to_bytes(sample_addresses["receiver"]) for tx in transfers) + + # Test both direction + transfers = _get_erc721_transfers_by_address_native(session, sample_addresses["receiver"], direction="both") + assert len(transfers) == 2 + + # Test with token_address filter + transfers = _get_erc721_transfers_by_address_native( + session, sample_addresses["sender"], token_address=sample_addresses["token"] + ) + assert len(transfers) > 0 + assert all(tx.token_address == hex_str_to_bytes(sample_addresses["token"]) for tx in transfers) + + +def test_get_erc1155_transfers_by_address_native(session, erc1155_token_transfers, sample_addresses): + """Test querying ERC1155 transfers by address using native 
table""" + # Test from direction + transfers = _get_erc1155_transfers_by_address_native(session, sample_addresses["sender"], direction="from") + assert len(transfers) == 2 + assert all(tx.from_address == hex_str_to_bytes(sample_addresses["sender"]) for tx in transfers) + assert all(tx.value is not None for tx in transfers) + + # Test to direction + transfers = _get_erc1155_transfers_by_address_native(session, sample_addresses["receiver"], direction="to") + assert len(transfers) == 2 + assert all(tx.to_address == hex_str_to_bytes(sample_addresses["receiver"]) for tx in transfers) + + # Test with specific token_id + first_transfer = erc1155_token_transfers[0] + transfers = _get_erc1155_transfers_by_address_native( + session, sample_addresses["sender"], token_address=sample_addresses["token"] + ) + assert len(transfers) > 0 + matching_transfer = next(tx for tx in transfers if tx.token_id == first_transfer.token_id) + assert matching_transfer.value == first_transfer.value + + +def test_get_nft_transfers_by_address_native(session, nft_transfers, sample_addresses): + """Test querying NFT transfers by address from unified table""" + # Test ERC721 transfers + transfers = get_nft_transfers_by_address_native(session, sample_addresses["sender"], token_type="erc721") + assert len(transfers) >= 1 + assert all(tx.value is None for tx in transfers) # ERC721 has no value + + # Test ERC1155 transfers + transfers = get_nft_transfers_by_address_native(session, sample_addresses["sender"], token_type="erc1155") + assert len(transfers) >= 1 + assert all(tx.value is not None for tx in transfers) # ERC1155 has value + + # Test all NFT transfers + transfers = get_nft_transfers_by_address_native(session, sample_addresses["sender"], token_type="all") + assert len(transfers) >= 2 # Should include both ERC721 and ERC1155 + + +def test_get_erc20_transfers_by_address_index(session, address_token_transfers, sample_addresses): + """Test querying ERC20 transfers by address using address index""" + 
transfers = _get_erc20_transfers_by_address_index(session, sample_addresses["sender"]) + assert len(transfers) == 1 + assert transfers[0].address == hex_str_to_bytes(sample_addresses["sender"]) + assert transfers[0].transfer_type == AddressTokenTransferType.SENDER.value + + # Test with token address filter + transfers = _get_erc20_transfers_by_address_index( + session, sample_addresses["sender"], token_address=sample_addresses["token"] + ) + assert len(transfers) == 1 + assert all(tx.token_address == hex_str_to_bytes(sample_addresses["token"]) for tx in transfers) + + +def test_get_erc721_transfers_by_address_index(session, address_nft_transfers, sample_addresses): + """Test querying ERC721 transfers by address using address index""" + transfers = _get_erc721_transfers_by_address_index(session, sample_addresses["sender"]) + assert len(transfers) == 1 + assert transfers[0].address == hex_str_to_bytes(sample_addresses["sender"]) + assert transfers[0].value is None # ERC721 has no value + assert transfers[0].transfer_type == AddressNftTransferType.SENDER.value + + +def test_get_erc1155_transfers_by_address_index(session, address_nft_transfers, sample_addresses): + """Test querying ERC1155 transfers by address using address index""" + transfers = _get_erc1155_transfers_by_address_index(session, sample_addresses["receiver"]) + assert len(transfers) == 1 + assert transfers[0].address == hex_str_to_bytes(sample_addresses["receiver"]) + assert transfers[0].value is not None # ERC1155 has value + assert transfers[0].transfer_type == AddressNftTransferType.RECEIVER.value + + +def test_get_token_transfers_by_address_using_address_index( + session, address_token_transfers, address_nft_transfers, sample_addresses +): + """Test unified query for token transfers using address index""" + # Test ERC20 transfers + transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type="ERC20", use_address_index=True + ) + assert len(transfers) == 1 + assert 
isinstance(transfers[0], TokenTransferAbbr) + + # Test ERC721 transfers + transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type="ERC721", use_address_index=True + ) + assert len(transfers) == 1 + assert isinstance(transfers[0], TokenTransferAbbr) + assert transfers[0].value is None + + # Test ERC1155 transfers + transfers = get_token_transfers_by_address( + session, sample_addresses["receiver"], token_type="ERC1155", use_address_index=True + ) + assert len(transfers) == 1 + assert isinstance(transfers[0], TokenTransferAbbr) + assert transfers[0].value is not None + + # Test all token types + transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type="ALL", use_address_index=True + ) + assert len(transfers) == 6 # Should include both ERC20 and NFTs + + +def test_get_token_transfers_by_address_using_native_tables( + session, erc20_token_transfers, erc721_token_transfers, erc1155_token_transfers, sample_addresses +): + """Test unified query for token transfers using native tables""" + # Test filtering by token type + for token_type in ["ERC20", "ERC721", "ERC1155"]: + transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type=token_type, use_address_index=False + ) + assert len(transfers) > 0 + if token_type == "ERC20": + assert isinstance(transfers[0], TokenTransferAbbr) + elif token_type == "ERC721": + assert isinstance(transfers[0], TokenTransferAbbr) + else: + assert isinstance(transfers[0], TokenTransferAbbr) + + # Test direction filtering + transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type="all", direction="from", use_address_index=False + ) + assert all(tx.from_address == sample_addresses["sender"] for tx in transfers) + + # Test with token address filter + transfers = get_token_transfers_by_address( + session, + sample_addresses["sender"], + token_type="all", + token_address=sample_addresses["token"], + 
use_address_index=False, + ) + assert all(tx.token_address == sample_addresses["token"] for tx in transfers) + + # Test pagination + all_transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type="all", use_address_index=False + ) + paginated_transfers = get_token_transfers_by_address( + session, sample_addresses["sender"], token_type="all", use_address_index=False, limit=1 + ) + assert len(paginated_transfers) == 1 + assert paginated_transfers[0].transaction_hash == all_transfers[0].transaction_hash + + +def test_empty_result_handling(session): + """Test handling of queries that return no results""" + # Use a random valid address that shouldn't exist in test data + non_existent_address = "0x1111111111111111111111111111111111111111" + + # Test native table queries + transfers = _get_erc20_transfers_by_address_native(session, non_existent_address) + assert isinstance(transfers, list) + assert len(transfers) == 0 + + # Test index table queries + transfers = _get_erc20_transfers_by_address_index(session, non_existent_address) + assert isinstance(transfers, list) + assert len(transfers) == 0 + + # Test unified query + transfers = get_token_transfers_by_address(session, non_existent_address, token_type="all") + assert isinstance(transfers, list) + assert len(transfers) == 0 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/api/routes/utils/transaction_test.py b/tests/hemera/app/api/routes/utils/transaction_test.py new file mode 100644 index 000000000..c7c0f32b6 --- /dev/null +++ b/tests/hemera/app/api/routes/utils/transaction_test.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/22 18:01 +# @Author ideal93 +# @File transaction_test.py +# @Brief + +import pytest + +from hemera.app.api.routes.helper.transaction import * +from hemera.app.api.routes.helper.transaction import _get_transaction_by_hash +from hemera.common.models.base.transactions import 
Transactions +from hemera.common.utils.format_utils import hex_str_to_bytes + + +@pytest.fixture +def sample_transactions(session): + now = datetime.utcnow() + transactions = [ + Transactions( + hash=hex_str_to_bytes("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + to_address=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"), + transaction_index=0, + ), + Transactions( + hash=hex_str_to_bytes("0x2234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436149, + block_timestamp=now, + from_address=hex_str_to_bytes("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + to_address=hex_str_to_bytes("0xcccccccccccccccccccccccccccccccccccccccc"), + transaction_index=1, + ), + Transactions( + hash=hex_str_to_bytes("0x3234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436148, + block_timestamp=now - timedelta(minutes=2), + from_address=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"), + to_address=hex_str_to_bytes("0xcccccccccccccccccccccccccccccccccccccccc"), + transaction_index=0, + ), + ] + + for tx in transactions: + session.add(tx) + session.commit() + + return transactions + + +@pytest.fixture +def sample_daily_stats(session): + now = datetime.utcnow() + stats = DailyTransactionsStats(block_date=now.date(), total_cnt=1000, success_cnt=950, failed_cnt=50) + session.add(stats) + session.commit() + return stats + + +def test_get_last_transaction(session, sample_transactions): + tx = get_last_transaction(session) + assert tx is not None + assert tx.block_number == 21436149 + assert tx.transaction_index == 1 + assert tx.hash == hex_str_to_bytes("0x2234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef") + + # Test with specific columns + tx = get_last_transaction(session, columns=["hash", "block_number"]) + assert tx.hash == 
hex_str_to_bytes("0x2234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef") + assert tx.block_number == 21436149 + with pytest.raises(AttributeError): + _ = tx.block_timestamp + + +def test_get_transaction_by_hash(session, sample_transactions): + tx = _get_transaction_by_hash(session, "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef") + assert tx is not None + assert tx.block_number == 21436149 + assert tx.transaction_index == 0 + + # Test non-existent hash + tx = _get_transaction_by_hash(session, "0xffff") + assert tx is None + + # Test invalid hash + with pytest.raises(ValueError): + _get_transaction_by_hash(session, "invalid_hash") + + # Test with specific columns + tx_block_number = _get_transaction_by_hash( + session, "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", columns="block_number" + ) + assert tx_block_number == 21436149 + + +def test_get_transactions_by_address(session, sample_transactions): + # Test "from" direction + txs = get_transactions_by_address(session, "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", direction="from") + assert len(txs) == 2 + assert all(tx.from_address == "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" for tx in txs) + + # Test "to" direction + txs = get_transactions_by_address(session, "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", direction="to") + assert len(txs) == 1 + assert txs[0].to_address == "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" + + # Test "both" direction + txs = get_transactions_by_address(session, "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", direction="both") + assert len(txs) == 2 + + # Test with limit and offset + txs = get_transactions_by_address(session, "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", limit=1, offset=1) + assert len(txs) == 1 + assert txs[0].hash == "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + + # Test with specific columns + txs = get_transactions_by_address( + session, + "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 
+ ) + + +def test_get_tps_latest_10min(session, sample_transactions): + now = datetime.utcnow() + tps = get_tps_latest_10min(session, now) + assert tps == pytest.approx(0.005, rel=1e-2) # 3 transactions in 600 seconds + + +def test_get_total_transaction_count(session, sample_transactions, sample_daily_stats): + count = get_total_transaction_count(session) + assert count > 1000 # Should be daily stats total + recent transactions + + +def test_get_transactions_by_condition(session, sample_transactions): + # Test without condition + txs = get_transactions_by_condition(session) + assert len(txs) == 3 + assert txs[0].block_number == 21436149 + assert txs[0].transaction_index == 1 + + # Test with condition + txs = get_transactions_by_condition(session, filter_condition=(Transactions.block_number == 21436149)) + assert len(txs) == 2 + assert all(tx.block_number == 21436149 for tx in txs) + + # Test with limit + txs = get_transactions_by_condition(session, limit=2) + assert len(txs) == 2 + + # Test with offset + txs = get_transactions_by_condition(session, offset=1, limit=1) + assert len(txs) == 1 + assert txs[0].hash == hex_str_to_bytes("0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef") + + # Test with specific columns + txs = get_transactions_by_condition(session, columns="block_number") + assert len(txs) == 3 + with pytest.raises(AttributeError): + _ = txs[0].hash + + +def test_get_transactions_count_by_condition(session, sample_transactions): + # Test without condition + count = get_transactions_count_by_condition(session) + assert count == 3 + + # Test with condition + count = get_transactions_count_by_condition(session, filter_condition=(Transactions.block_number == 21436149)) + assert count == 2 + + +def test_transaction_isolation(session, sample_transactions): + with session.begin(): + new_tx = Transactions( + hash=hex_str_to_bytes("0x9934567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"), + block_number=21436150, + 
block_timestamp=datetime.utcnow(), + from_address=hex_str_to_bytes("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + to_address=hex_str_to_bytes("0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"), + transaction_index=0, + ) + session.add(new_tx) + + last_tx = get_last_transaction(session) + assert last_tx.block_number == 21436150 + + session.rollback() + + last_tx = get_last_transaction(session) + assert last_tx.block_number == 21436149 + + +if __name__ == "__main__": + pytest.main(["-sv", __file__]) diff --git a/tests/hemera/app/core/__init__.py b/tests/hemera/app/core/__init__.py new file mode 100644 index 000000000..c6d4031a7 --- /dev/null +++ b/tests/hemera/app/core/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2024/12/19 18:22 +# @Author will +# @File __init__.py +# @Brief diff --git a/tests/hemera/app/service/__init__.py b/tests/hemera/app/service/__init__.py new file mode 100644 index 000000000..49ab63b98 --- /dev/null +++ b/tests/hemera/app/service/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/1/31 22:24 +# @Author ideal93 +# @File __init__.py +# @Brief diff --git a/tests/hemera/app/service/test_extra_contract_service.py b/tests/hemera/app/service/test_extra_contract_service.py new file mode 100644 index 000000000..0e34e96bf --- /dev/null +++ b/tests/hemera/app/service/test_extra_contract_service.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/1/31 22:25 +# @Author ideal93 +# @File test_extra_contract_service.py +# @Brief diff --git a/tests/hemera/app/service/test_extra_ens_service.py b/tests/hemera/app/service/test_extra_ens_service.py new file mode 100644 index 000000000..649b3a39b --- /dev/null +++ b/tests/hemera/app/service/test_extra_ens_service.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/1/31 22:25 +# @Author ideal93 +# @File test_extra_ens_service.py +# @Brief diff --git 
a/tests/hemera/indexer/exporters/__init__.py b/tests/hemera/indexer/exporters/__init__.py new file mode 100644 index 000000000..300e5c3e0 --- /dev/null +++ b/tests/hemera/indexer/exporters/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/25 16:37 +# @Author ideal93 +# @File __init__.py +# @Brief diff --git a/tests/hemera/indexer/exporters/test_postgres_item_exporter.py b/tests/hemera/indexer/exporters/test_postgres_item_exporter.py new file mode 100644 index 000000000..049f296a6 --- /dev/null +++ b/tests/hemera/indexer/exporters/test_postgres_item_exporter.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# @Time 2025/2/25 16:37 +# @Author ideal93 +# @File test_postgres_item_exporter.py +# @Brief