diff --git a/backend/utils/performance_data_source.py b/backend/utils/performance_data_source.py index 30e7bfd8..ca108370 100755 --- a/backend/utils/performance_data_source.py +++ b/backend/utils/performance_data_source.py @@ -3,6 +3,7 @@ import numpy as np import pandas as pd + from hummingbot.core.data_type.common import TradeType from hummingbot.strategy_v2.models.base import RunnableStatus from hummingbot.strategy_v2.models.executors import CloseType @@ -36,7 +37,7 @@ def load_trade_fill(self): def load_controllers(self): controllers = self.checkpoint_data["controllers"].copy() controllers = pd.DataFrame(controllers) - controllers["config"] = controllers["config"].apply(lambda x: json.loads(x)) + controllers["config"] = controllers["config"].apply(lambda x: json.loads(x) if isinstance(x, str) else x) controllers["datetime"] = pd.to_datetime(controllers.timestamp, unit="s") return controllers @@ -105,6 +106,8 @@ def get_executors_with_orders(executors_df: pd.DataFrame, orders: pd.DataFrame): df = (executors_df[["id", "order_ids"]] .rename(columns={"id": "executor_id", "order_ids": "order_id"}) .explode("order_id")) + if "id" in orders.columns: + orders = orders.rename(columns={"id": "client_order_id"}) exec_with_orders = df.merge(orders, left_on="order_id", right_on="client_order_id", how="inner") exec_with_orders = exec_with_orders[exec_with_orders["last_status"].isin(["SellOrderCompleted", "BuyOrderCompleted"])] diff --git a/frontend/pages/performance/bot_performance/app.py b/frontend/pages/performance/bot_performance/app.py index 1a99ed99..f5a9f47d 100755 --- a/frontend/pages/performance/bot_performance/app.py +++ b/frontend/pages/performance/bot_performance/app.py @@ -10,7 +10,7 @@ display_performance_summary_table, display_tables_section, ) -from frontend.visualization.performance_etl import display_etl_section +from frontend.visualization.performance_etl import display_postgres_etl_section, display_sqlite_etl_section async def main(): @@ -19,7 +19,14 
@@ async def main(): backend_api = get_backend_api_client() st.subheader("🔫 DATA SOURCE") - checkpoint_data = display_etl_section(backend_api) + selected_db = st.selectbox("Select DB connector", ["PostgreSQL", "SQLite"]) + if selected_db == "SQLite": + checkpoint_data = display_sqlite_etl_section(backend_api) + else: + checkpoint_data = await display_postgres_etl_section() + if checkpoint_data is None: + st.warning("Unable to retrieve data. Ensure the PostgreSQL database is accessible and contains relevant information.") + st.stop() data_source = PerformanceDataSource(checkpoint_data) st.divider() diff --git a/frontend/visualization/performance_etl.py b/frontend/visualization/performance_etl.py index 659267e0..d61561f6 100644 --- a/frontend/visualization/performance_etl.py +++ b/frontend/visualization/performance_etl.py @@ -1,11 +1,56 @@ import json +import os +import pandas as pd import streamlit as st +from dotenv import load_dotenv +from sqlalchemy import create_engine from backend.services.backend_api_client import BackendAPIClient -def display_etl_section(backend_api: BackendAPIClient): +async def display_postgres_etl_section(): + load_dotenv() + col1, col2, col3, col4, col5 = st.columns(5) + + with col1: + username = st.text_input("Username", os.getenv("POSTGRES_USERNAME", "postgres")) + with col2: + password = st.text_input("Password", os.getenv("POSTGRES_PASSWORD", "postgres"), type="password") + with col3: + host = st.text_input("Host", os.getenv("POSTGRES_HOST", "localhost")) + with col4: + port = st.number_input("Port", value=int(os.getenv("POSTGRES_PORT", 5432))) + with col5: + database = st.text_input("Database", os.getenv("POSTGRES_DATABASE", "postgres")) + if st.button("Fetch data"): + db_url = f"postgresql://{username}:{password}@{host}:{port}/{database}" + try: + engine = create_engine( + db_url, + pool_size=5, + max_overflow=10, + pool_timeout=30, + pool_recycle=1800, + connect_args={"connect_timeout": 10} + ) + with engine.connect() as connection: + 
raw_connection = connection.connection + checkpoint_data = { + "executors": pd.read_sql_query("SELECT * FROM \"Executors\"", raw_connection).to_dict('records'), + "orders": pd.read_sql_query("SELECT * FROM \"Order\"", raw_connection).to_dict('records'), + "trade_fill": pd.read_sql_query("SELECT * FROM \"TradeFill\"", raw_connection).to_dict('records'), + "controllers": pd.read_sql_query("SELECT * FROM \"Controllers\"", raw_connection).to_dict('records') + } + return checkpoint_data + except Exception as e: + st.error(f"Error fetching data: {e}") + return None + else: + st.stop() + + +def display_sqlite_etl_section(backend_api: BackendAPIClient): db_paths = backend_api.list_databases() dbs_dict = backend_api.read_databases(db_paths) healthy_dbs = [db["db_path"].replace("sqlite:///", "") for db in dbs_dict if db["healthy"]]