Commit d5f1741

Merge branch 'main' of github.com:activeloopai/deeplake
2 parents 46cc07b + 0e26444

5 files changed: 10 additions & 35 deletions

cpp/deeplake_pg/extension_init.cpp

Lines changed: 2 additions & 2 deletions

@@ -238,8 +238,8 @@ void initialize_guc_parameters()
         "allowing multiple PostgreSQL instances to share the same tables. "
         "This adds latency for remote storage (S3, GCS) due to catalog sync operations.",
         &pg::stateless_enabled,
-        false,
-        PGC_USERSET,
+        true,
+        PGC_POSTMASTER,
         0,
         nullptr,
         nullptr,
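
The GUC registration above flips the default of deeplake.stateless_enabled from false to true and tightens its context from PGC_USERSET to PGC_POSTMASTER, so the value is now fixed when the server starts (postgresql.conf or the command line) and can no longer be toggled with SET inside a session. That is why the per-session "SET deeplake.stateless_enabled = true" calls disappear from the tests below. A minimal sketch of what a client session looks like after this change, assuming asyncpg and a locally running server built with this commit; the connection parameters and root path are illustrative placeholders:

import asyncio

import asyncpg


async def main() -> None:
    conn = await asyncpg.connect(host="localhost", port=5432, database="postgres")
    try:
        # Stateless mode is now on by default. Because the GUC is PGC_POSTMASTER,
        # PostgreSQL rejects attempts to change it at runtime, so the old
        # per-session toggle is expected to fail rather than take effect.
        try:
            await conn.execute("SET deeplake.stateless_enabled = false")
        except asyncpg.PostgresError as exc:
            print(f"runtime SET rejected: {exc}")

        # Session setup now only needs the root path; in stateless mode this
        # triggers catalog sync against the shared storage location.
        await conn.execute("SET deeplake.root_path = '/tmp/deeplake_root'")
    finally:
        await conn.close()


asyncio.run(main())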

postgres/tests/py_tests/test_startup_latency.py

Lines changed: 0 additions & 21 deletions

@@ -100,7 +100,6 @@ async def measure_connection_latency(
     database: str = "postgres",
     with_extension: bool = True,
     root_path: Optional[str] = None,
-    stateless_enabled: bool = False,
     run_first_query: bool = True,
     create_table: bool = False,
     table_name: str = "latency_test",
@@ -113,7 +112,6 @@ async def measure_connection_latency(
         database: Database to connect to
         with_extension: Whether to load pg_deeplake extension
         root_path: If set, configure deeplake.root_path
-        stateless_enabled: Whether to enable stateless mode
         run_first_query: Whether to measure first query time
         create_table: Whether to measure table creation time
         table_name: Name for test table
@@ -144,10 +142,6 @@ async def measure_connection_latency(
         await conn.execute("CREATE EXTENSION pg_deeplake")
         metrics.extension_load_time_ms = (time.perf_counter() - ext_start) * 1000

-    # Set stateless mode if requested
-    if stateless_enabled:
-        await conn.execute("SET deeplake.stateless_enabled = true")
-
     # 3. Measure root_path set time (triggers catalog loading in stateless mode)
     if root_path:
         root_start = time.perf_counter()
@@ -188,7 +182,6 @@ async def measure_catalog_discovery_latency(
     port: int,
     root_path: str,
     num_tables: int,
-    stateless_enabled: bool = True,
 ) -> LatencyMetrics:
     """
     Measure time to discover existing tables from catalog.
@@ -214,8 +207,6 @@ async def measure_catalog_discovery_latency(
     ext_start = time.perf_counter()
     await conn.execute("DROP EXTENSION IF EXISTS pg_deeplake CASCADE")
     await conn.execute("CREATE EXTENSION pg_deeplake")
-    if stateless_enabled:
-        await conn.execute("SET deeplake.stateless_enabled = true")
     metrics.extension_load_time_ms = (time.perf_counter() - ext_start) * 1000

     # Set root_path - this triggers catalog discovery
@@ -349,7 +340,6 @@ async def test_stateless_catalog_loading_latency(pg_server, temp_root_path):
     try:
         await setup_conn.execute("DROP EXTENSION IF EXISTS pg_deeplake CASCADE")
         await setup_conn.execute("CREATE EXTENSION pg_deeplake")
-        await setup_conn.execute("SET deeplake.stateless_enabled = true")
         await setup_conn.execute(f"SET deeplake.root_path = '{temp_root_path}'")

         # Create multiple tables to populate the catalog
@@ -383,7 +373,6 @@ async def test_stateless_catalog_loading_latency(pg_server, temp_root_path):
             port=5432,
             root_path=temp_root_path,
             num_tables=num_tables,
-            stateless_enabled=True,
         )
         report.add(metrics)
         print(f"Run {i+1}:")
@@ -425,7 +414,6 @@ async def test_stateless_vs_nonstateless_comparison(pg_server, temp_root_path):
         metrics = await measure_connection_latency(
             with_extension=True,
             root_path=temp_root_path,
-            stateless_enabled=False,
             run_first_query=True,
             create_table=True,
             table_name=f"nonstateless_test_{i}",
@@ -441,7 +429,6 @@ async def test_stateless_vs_nonstateless_comparison(pg_server, temp_root_path):
         metrics = await measure_connection_latency(
             with_extension=True,
             root_path=temp_root_path,
-            stateless_enabled=True,
             run_first_query=True,
             create_table=True,
             table_name=f"stateless_test_{i}",
@@ -492,7 +479,6 @@ async def test_multi_table_catalog_scaling(pg_server, temp_root_path):
     try:
         await setup_conn.execute("DROP EXTENSION IF EXISTS pg_deeplake CASCADE")
         await setup_conn.execute("CREATE EXTENSION pg_deeplake")
-        await setup_conn.execute("SET deeplake.stateless_enabled = true")
         await setup_conn.execute(f"SET deeplake.root_path = '{temp_root_path}'")

         # Create tables
@@ -512,7 +498,6 @@ async def test_multi_table_catalog_scaling(pg_server, temp_root_path):
             port=5432,
             root_path=temp_root_path,
             num_tables=num_tables,
-            stateless_enabled=True,
         )
         report.add(metrics)

@@ -593,7 +578,6 @@ async def test_cold_start_simulation(pg_server, temp_root_path):
     try:
         await setup_conn.execute("DROP EXTENSION IF EXISTS pg_deeplake CASCADE")
         await setup_conn.execute("CREATE EXTENSION pg_deeplake")
-        await setup_conn.execute("SET deeplake.stateless_enabled = true")
         await setup_conn.execute(f"SET deeplake.root_path = '{temp_root_path}'")

         await setup_conn.execute("""
@@ -633,10 +617,6 @@ async def test_cold_start_simulation(pg_server, temp_root_path):
         try:
             # Extension is already loaded via shared_preload_libraries
             # Just configure the session (simulating a new backend)
-            ext_start = time.perf_counter()
-            await conn.execute("SET deeplake.stateless_enabled = true")
-            ext_time = (time.perf_counter() - ext_start) * 1000
-
             root_start = time.perf_counter()
             await conn.execute(f"SET deeplake.root_path = '{temp_root_path}'")
             root_time = (time.perf_counter() - root_start) * 1000
@@ -650,7 +630,6 @@ async def test_cold_start_simulation(pg_server, temp_root_path):

             print(f"\nRun {run + 1}:")
             print(f" Connection: {conn_time:8.2f} ms")
-            print(f" Session config: {ext_time:8.2f} ms")
             print(f" Root path set: {root_time:8.2f} ms")
             print(f" First query: {query_time:8.2f} ms (count={count})")
             print(f" TOTAL COLD START: {total_time:8.2f} ms")

postgres/tests/py_tests/test_stateless_catalog_resilience.py

Lines changed: 0 additions & 2 deletions

@@ -28,8 +28,6 @@ async def test_stateless_bootstrap_permission_error_keeps_backend_alive(db_conn:
     - SET deeplake.root_path fails with a PostgreSQL error
     - Same connection remains usable afterwards
     """
-    await db_conn.execute("SET deeplake.stateless_enabled = true")
-
     readonly_root = Path(temp_dir_for_postgres) / "readonly_root"
     readonly_root.mkdir(parents=True, exist_ok=True)
     os.chmod(readonly_root, 0o555)
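
The deleted line was the only session setup this test needed beyond its fixture; with stateless mode on by default, the test goes straight to pointing root_path at a directory the backend cannot write to. A rough sketch of the scenario it exercises, assuming asyncpg and placeholder connection details; the real test drives this through its db_conn and temp_dir_for_postgres fixtures:

import asyncio
import os
from pathlib import Path

import asyncpg


async def check_backend_survives(temp_dir: str) -> None:
    # A root the server can read but not write to, to force a bootstrap failure.
    readonly_root = Path(temp_dir) / "readonly_root"
    readonly_root.mkdir(parents=True, exist_ok=True)
    os.chmod(readonly_root, 0o555)

    conn = await asyncpg.connect(host="localhost", port=5432, database="postgres")
    try:
        try:
            await conn.execute(f"SET deeplake.root_path = '{readonly_root}'")
        except asyncpg.PostgresError as exc:
            print(f"bootstrap failed with a PostgreSQL error: {exc}")

        # The same backend should stay alive and keep answering queries.
        assert await conn.fetchval("SELECT 1") == 1
    finally:
        await conn.close()


asyncio.run(check_backend_survives("/tmp/pg_deeplake_resilience"))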

postgres/tests/py_tests/test_stateless_multi_instance.py

Lines changed: 4 additions & 5 deletions

@@ -213,7 +213,6 @@ async def primary_conn(pg_server):
         # Setup: Clean extension state
         await conn.execute("DROP EXTENSION IF EXISTS pg_deeplake CASCADE")
         await conn.execute("CREATE EXTENSION pg_deeplake")
-        await conn.execute("SET deeplake.stateless_enabled = true")
         yield conn
     finally:
         await conn.close()
@@ -323,7 +322,7 @@ async def test_stateless_data_sync_between_instances(
     try:
         # Setup extension (create if not exists for session-scoped instance reuse)
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+

         # Setting root_path should automatically discover and register tables from catalog
         await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")
@@ -412,7 +411,7 @@ async def test_stateless_concurrent_writes(
     conn_b = await second_instance.connect()
     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+

         # Setting root_path should auto-discover tables from deeplake catalog
         await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")
@@ -515,7 +514,7 @@ async def test_stateless_multiple_tables_discovery(
     conn_b = await second_instance.connect()
     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+

         # Setting root_path should auto-discover ALL tables from deeplake catalog
         await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")
@@ -683,7 +682,7 @@ async def test_stateless_varchar1_catalog_sync(
     conn_b = await second_instance.connect()
     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+
        await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")

         # Verify table was auto-discovered
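
Across these multi-instance tests the secondary connection no longer flips any session GUC: it creates the extension if needed and sets deeplake.root_path, and catalog sync is expected to register the primary's tables because stateless mode is now the default. A condensed sketch of that bootstrap, assuming asyncpg; the port and shared root path are illustrative stand-ins for the second_instance and shared_root_path fixtures:

import asyncio

import asyncpg


async def attach_secondary(port: int, shared_root_path: str) -> None:
    conn_b = await asyncpg.connect(host="localhost", port=port, database="postgres")
    try:
        await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
        # No per-session stateless toggle anymore: setting root_path alone is
        # expected to sync the catalog and register tables created elsewhere.
        await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")

        rows = await conn_b.fetch(
            "SELECT schemaname, tablename FROM pg_tables WHERE schemaname = 'public'"
        )
        print([(r["schemaname"], r["tablename"]) for r in rows])
    finally:
        await conn_b.close()


asyncio.run(attach_secondary(5433, "/tmp/shared_deeplake_root"))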

postgres/tests/py_tests/test_stateless_reserved_schema.py

Lines changed: 4 additions & 5 deletions

@@ -75,7 +75,6 @@ async def primary_conn(pg_server):
     try:
         await conn.execute("DROP EXTENSION IF EXISTS pg_deeplake CASCADE")
         await conn.execute("CREATE EXTENSION pg_deeplake")
-        await conn.execute("SET deeplake.stateless_enabled = true")
         yield conn
     finally:
         await conn.close()
@@ -144,7 +143,7 @@ async def test_catalog_sync_default_schema(

     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+

         # This is the critical part - setting root_path triggers catalog sync
         # which should properly quote "default" schema name in generated DDL
@@ -236,7 +235,7 @@ async def test_catalog_sync_multiple_reserved_schemas(

     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+
         await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")

         # Verify all tables discovered
@@ -322,7 +321,7 @@ async def test_catalog_sync_default_schema_with_indexes(

     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+
         await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")

         # Verify table discovered
@@ -378,7 +377,7 @@ async def test_catalog_sync_default_schema_write_from_secondary(

     try:
         await conn_b.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
-        await conn_b.execute("SET deeplake.stateless_enabled = true")
+
         await conn_b.execute(f"SET deeplake.root_path = '{shared_root_path}'")

         # Insert from Instance B
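
These tests guard the catalog-sync DDL against reserved identifiers: a schema literally named default has to be double-quoted in whatever CREATE statements a discovering instance generates, and that now happens under the default stateless configuration rather than an explicit session toggle. A small sketch of the primary-side setup the tests build on, assuming asyncpg; the root path is a placeholder, and the real tests create deeplake-backed tables through the extension rather than the plain table shown here:

import asyncio

import asyncpg


async def create_in_reserved_schema(root_path: str) -> None:
    conn = await asyncpg.connect(host="localhost", port=5432, database="postgres")
    try:
        await conn.execute("CREATE EXTENSION IF NOT EXISTS pg_deeplake")
        await conn.execute(f"SET deeplake.root_path = '{root_path}'")

        # DEFAULT is a reserved keyword, so the schema name must be quoted;
        # catalog sync on a secondary instance has to emit the same quoting
        # in the DDL it generates for discovered tables.
        await conn.execute('CREATE SCHEMA IF NOT EXISTS "default"')
        await conn.execute(
            'CREATE TABLE IF NOT EXISTS "default".items (id int, name text)'
        )
    finally:
        await conn.close()


asyncio.run(create_in_reserved_schema("/tmp/deeplake_root"))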
