5 changes: 4 additions & 1 deletion backend/utils/performance_data_source.py
@@ -3,6 +3,7 @@
 
 import numpy as np
 import pandas as pd
+
 from hummingbot.core.data_type.common import TradeType
 from hummingbot.strategy_v2.models.base import RunnableStatus
 from hummingbot.strategy_v2.models.executors import CloseType
@@ -36,7 +37,7 @@ def load_trade_fill(self):
     def load_controllers(self):
         controllers = self.checkpoint_data["controllers"].copy()
         controllers = pd.DataFrame(controllers)
-        controllers["config"] = controllers["config"].apply(lambda x: json.loads(x))
+        controllers["config"] = controllers["config"].apply(lambda x: json.loads(x) if isinstance(x, str) else x)
         controllers["datetime"] = pd.to_datetime(controllers.timestamp, unit="s")
         return controllers
@@ -105,6 +106,8 @@ def get_executors_with_orders(executors_df: pd.DataFrame, orders: pd.DataFrame):
         df = (executors_df[["id", "order_ids"]]
               .rename(columns={"id": "executor_id", "order_ids": "order_id"})
               .explode("order_id"))
+        if "id" in orders.columns:
+            orders.rename(columns={"id": "client_order_id"}, inplace=True)
        exec_with_orders = df.merge(orders, left_on="order_id", right_on="client_order_id", how="inner")
        exec_with_orders = exec_with_orders[exec_with_orders["last_status"].isin(["SellOrderCompleted",
                                                                                  "BuyOrderCompleted"])]
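Note on the two changes above: checkpoints loaded from SQLite carry `config` as a JSON string, while rows read from a PostgreSQL JSON/JSONB column typically arrive already deserialized as dicts, so the `isinstance` guard lets both pass through `load_controllers`; likewise, the PostgreSQL `Order` table appears to expose the client order id under `id`, hence the rename before the merge. A minimal sketch of the guard's effect (the sample values are illustrative, not taken from a real checkpoint):

```python
import json

import pandas as pd

# Illustrative only: a "config" column mixing a raw JSON string (SQLite-style)
# with a dict already deserialized by the PostgreSQL driver.
configs = pd.Series(['{"controller_name": "pmm_simple"}', {"controller_name": "dman_v3"}])

parsed = configs.apply(lambda x: json.loads(x) if isinstance(x, str) else x)
print(parsed.tolist())  # both entries are plain dicts after the guard
```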
11 changes: 9 additions & 2 deletions frontend/pages/performance/bot_performance/app.py
@@ -10,7 +10,7 @@
     display_performance_summary_table,
     display_tables_section,
 )
-from frontend.visualization.performance_etl import display_etl_section
+from frontend.visualization.performance_etl import display_postgres_etl_section, display_sqlite_etl_section
 
 
 async def main():
@@ -19,7 +19,14 @@ async def main():
     backend_api = get_backend_api_client()
 
     st.subheader("🔫 DATA SOURCE")
-    checkpoint_data = display_etl_section(backend_api)
+    selected_db = st.selectbox("Select DB connector", ["PostgreSQL", "SQLite"])
+    if selected_db == "SQLite":
+        checkpoint_data = display_sqlite_etl_section(backend_api)
+    else:
+        checkpoint_data = await display_postgres_etl_section()
+        if checkpoint_data is None:
+            st.warning("Unable to retrieve data. Ensure the PostgreSQL database is accessible and contains relevant information.")
+            st.stop()
     data_source = PerformanceDataSource(checkpoint_data)
     st.divider()
 
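Both branches are expected to hand `PerformanceDataSource` the same checkpoint payload; judging from `display_postgres_etl_section` below, that is a dict of four lists of row dicts. A rough sketch of the assumed shape:

```python
# Assumed shape of the checkpoint payload returned by either ETL helper;
# keys mirror the queries in display_postgres_etl_section below.
checkpoint_data = {
    "executors": [],    # rows from the Executors table
    "orders": [],       # rows from the Order table
    "trade_fill": [],   # rows from the TradeFill table
    "controllers": [],  # rows from the Controllers table
}
```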
47 changes: 46 additions & 1 deletion frontend/visualization/performance_etl.py
@@ -1,11 +1,56 @@
 import json
+import os
 
+import pandas as pd
 import streamlit as st
+from dotenv import load_dotenv
+from sqlalchemy import create_engine
 
 from backend.services.backend_api_client import BackendAPIClient
 
 
-def display_etl_section(backend_api: BackendAPIClient):
+async def display_postgres_etl_section():
+    load_dotenv()
+    col1, col2, col3, col4, col5 = st.columns(5)
+
+    with col1:
+        username = st.text_input("Username", os.getenv("POSTGRES_USERNAME", "postgres"))
+    with col2:
+        password = st.text_input("Password", os.getenv("POSTGRES_PASSWORD", "postgres"), type="password")
+    with col3:
+        host = st.text_input("Host", os.getenv("POSTGRES_HOST", "localhost"))
+    with col4:
+        port = st.number_input("Port", os.getenv("POSTGRES_PORT", 5432))
+    with col5:
+        database = st.text_input("Database", os.getenv("POSTGRES_DATABASE", "postgres"))
+    if st.button("Fetch data"):
+        db_url = f"postgresql://{username}:{password}@{host}:{port}/{database}"
+        try:
+            engine = create_engine(
+                db_url,
+                pool_size=5,
+                max_overflow=10,
+                pool_timeout=30,
+                pool_recycle=1800,
+                connect_args={"connect_timeout": 10}
+            )
+            with engine.connect() as connection:
+                raw_connection = connection.connection
+                checkpoint_data = {
+                    "executors": pd.read_sql_query("SELECT * FROM \"Executors\"", raw_connection).to_dict('records'),
+                    "orders": pd.read_sql_query("SELECT * FROM \"Order\"", raw_connection).to_dict('records'),
+                    "trade_fill": pd.read_sql_query("SELECT * FROM \"TradeFill\"", raw_connection).to_dict('records'),
+                    "controllers": pd.read_sql_query("SELECT * FROM \"Controllers\"", raw_connection).to_dict('records')
+                }
+                return checkpoint_data
+        except Exception as e:
+            print(f"Error fetching data: {e}")
+            return None
+    else:
+        st.stop()
+
+
+def display_sqlite_etl_section(backend_api: BackendAPIClient):
     db_paths = backend_api.list_databases()
     dbs_dict = backend_api.read_databases(db_paths)
     healthy_dbs = [db["db_path"].replace("sqlite:///", "") for db in dbs_dict if db["healthy"]]
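Since `display_postgres_etl_section` pre-fills the form from a `.env` file via `load_dotenv()`, a quick way to verify the credentials outside Streamlit is a standalone script built on the same environment variables. A sketch, assuming the variable names used above and a reachable PostgreSQL instance:

```python
import os

from dotenv import load_dotenv
from sqlalchemy import create_engine, text

# Standalone connectivity check using the same env vars the form reads
# (POSTGRES_USERNAME, POSTGRES_PASSWORD, POSTGRES_HOST, POSTGRES_PORT, POSTGRES_DATABASE).
load_dotenv()
db_url = (
    f"postgresql://{os.getenv('POSTGRES_USERNAME', 'postgres')}:"
    f"{os.getenv('POSTGRES_PASSWORD', 'postgres')}@"
    f"{os.getenv('POSTGRES_HOST', 'localhost')}:"
    f"{os.getenv('POSTGRES_PORT', '5432')}/"
    f"{os.getenv('POSTGRES_DATABASE', 'postgres')}"
)
engine = create_engine(db_url, connect_args={"connect_timeout": 10})
with engine.connect() as connection:
    # The table name is quoted because it is created with mixed case.
    count = connection.execute(text('SELECT COUNT(*) FROM "Executors"')).scalar()
    print(f"Executors rows: {count}")
```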