Skip to content

Commit a98bb87

Browse files
committed
Merge branch 'devel' into fix/3269-fixes-athena-refresh-mode
2 parents 3544617 + 4d25a6c commit a98bb87

File tree

21 files changed

+609
-197
lines changed

21 files changed

+609
-197
lines changed

.github/workflows/main.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ name: dlt
22

33
on:
44
pull_request:
5-
branches: [ master, devel ]
5+
branches: [ master, devel, runtime ]
66

77
concurrency:
88
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}

.github/workflows/test_tools_dashboard.yml

Lines changed: 2 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ jobs:
9696
if: matrix.python-version != '3.14.0-beta.4'
9797

9898
- name: Install playwright & deps
99-
run: playwright install && playwright install-deps
99+
run: playwright install chromium && playwright install-deps
100100
if: matrix.python-version != '3.14.0-beta.4'
101101

102102
# Run workspace dashboard unit tests
@@ -107,16 +107,8 @@ jobs:
107107
# Run workspace dashboard e2e tests (does not pass with python 3.9
108108
- name: Run dashboard e2e
109109
run: |
110-
marimo run --headless dlt/_workspace/helpers/dashboard/dlt_dashboard.py -- -- --pipelines-dir _storage/.dlt/pipelines/ --with_test_identifiers true & pytest --browser chromium tests/e2e
111-
if: matrix.python-version != '3.9' && matrix.python-version != '3.14.0-beta.4' && matrix.os != 'windows-latest'
112-
113-
# note that this test will pass only when running from cmd shell (_storage\.dlt\pipelines\ must stay)
114-
- name: Run dashboard e2e windows
115-
run: |
116-
start marimo run --headless dlt/_workspace/helpers/dashboard/dlt_dashboard.py -- -- --pipelines-dir _storage\.dlt\pipelines\ --with_test_identifiers true
117-
timeout /t 6 /nobreak
118110
pytest --browser chromium tests/e2e
119-
if: matrix.python-version != '3.9' && matrix.python-version != '3.14.0-beta.4' && matrix.os == 'windows-latest'
111+
if: matrix.python-version != '3.9' && matrix.python-version != '3.14.0-beta.4'
120112

121113
matrix_job_required_check:
122114
name: common | common tests

Makefile

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -173,9 +173,6 @@ test-e2e-dashboard:
173173
test-e2e-dashboard-headed:
174174
uv run pytest --headed --browser chromium tests/e2e
175175

176-
start-dlt-dashboard-e2e:
177-
uv run marimo run --headless dlt/_workspace/helpers/dashboard/dlt_dashboard.py -- -- --pipelines-dir _storage/.dlt/pipelines --with_test_identifiers true
178-
179176
# creates the dashboard test pipelines globally for manual testing of the dashboard app and cli
180177
create-test-pipelines:
181178
uv run python tests/workspace/helpers/dashboard/example_pipelines.py
Lines changed: 21 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,46 +1,40 @@
1-
"""A template that is a good start for vibe coding REST API Source. Works best with `dlt ai` command cursor rules"""
1+
"""Template for building a `dlt` pipeline to ingest data from a REST API."""
22

33
import dlt
4-
from dlt.sources.rest_api import (
5-
RESTAPIConfig,
6-
rest_api_resources,
7-
)
4+
from dlt.sources.rest_api import rest_api_resources
5+
from dlt.sources.rest_api.typing import RESTAPIConfig
86

97

8+
# if no argument is provided, `access_token` is read from `.dlt/secrets.toml`
109
@dlt.source
11-
def source(access_token=dlt.secrets.value):
10+
def rest_api_source(access_token: str = dlt.secrets.value):
11+
"""Define dlt resources from REST API endpoints."""
1212
config: RESTAPIConfig = {
1313
"client": {
14-
# TODO: place valid base url here
14+
# TODO set base URL for the REST API
1515
"base_url": "https://example.com/v1/",
16-
# TODO: configure the right auth or remove if api does not need authentication
17-
# NOTE: pass secrets and other configuration in source function signature
18-
"auth": {
19-
"type": "bearer",
20-
"token": access_token,
21-
},
16+
# TODO configure the right authentication method or remove
17+
"auth": {"type": "bearer", "token": access_token},
2218
},
2319
"resources": [
24-
# TODO: add resource definitions here
20+
# TODO define resources per endpoint
2521
],
22+
# set `resource_defaults` to apply configuration to all endpoints
2623
}
2724

2825
yield from rest_api_resources(config)
2926

3027

31-
def get_data() -> None:
32-
pipeline = dlt.pipeline(
33-
pipeline_name="rest_api_github",
34-
destination="duckdb",
35-
dataset_name="rest_api_data",
36-
)
37-
38-
# TODO: during debugging feel free to pass access token explicitly
39-
# NOTE: use `secrets.toml` or env variables to pass configuration in production
40-
access_token = "my_access_token"
41-
load_info = pipeline.run(source(access_token))
42-
print(load_info) # noqa
28+
pipeline = dlt.pipeline(
29+
pipeline_name="rest_api_ingest",
30+
destination="duckdb",
31+
# `refresh="drop_sources"` ensures the data and the state are cleaned
32+
# on each `pipeline.run()`; remove the argument once you have a
33+
# working pipeline.
34+
refresh="drop_sources",
35+
)
4336

4437

4538
if __name__ == "__main__":
46-
get_data()
39+
load_info = pipeline.run(rest_api_source())
40+
print(load_info) # noqa: T201

dlt/_workspace/cli/_init_command.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -340,7 +340,7 @@ def init_pipeline_at_destination(
340340
transformed_nodes = source_detection.find_call_arguments_to_replace(
341341
visitor,
342342
[
343-
("destination", destination_type or "duckdb"),
343+
("destination", destination_type or "duckdb", True),
344344
],
345345
source_configuration.src_pipeline_script,
346346
)
@@ -372,9 +372,9 @@ def init_pipeline_at_destination(
372372
transformed_nodes = source_detection.find_call_arguments_to_replace(
373373
visitor,
374374
[
375-
("destination", destination_type),
376-
("pipeline_name", display_source_name + "_pipeline"),
377-
("dataset_name", display_source_name + "_data"),
375+
("destination", destination_type, True),
376+
("pipeline_name", display_source_name + "_pipeline", True),
377+
("dataset_name", display_source_name + "_data", False),
378378
],
379379
source_configuration.src_pipeline_script,
380380
)

dlt/_workspace/cli/source_detection.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,9 @@
1414

1515

1616
def find_call_arguments_to_replace(
17-
visitor: PipelineScriptVisitor, replace_nodes: List[Tuple[str, str]], init_script_name: str
17+
visitor: PipelineScriptVisitor,
18+
replace_nodes: List[Tuple[str, str, bool]],
19+
init_script_name: str,
1820
) -> List[Tuple[ast.AST, ast.AST]]:
1921
# the input tuple (call argument name, replacement value)
2022
# the returned tuple (node, replacement value, node type)
@@ -23,7 +25,7 @@ def find_call_arguments_to_replace(
2325
known_calls: Dict[str, List[inspect.BoundArguments]] = visitor.known_calls
2426
for arg_name, calls in known_calls.items():
2527
for args in calls:
26-
for t_arg_name, t_value in replace_nodes:
28+
for t_arg_name, t_value, _ in replace_nodes:
2729
dn_node: ast.AST = args.arguments.get(t_arg_name)
2830
if dn_node is not None:
2931
if not isinstance(dn_node, ast.Constant) or not isinstance(dn_node.value, str):
@@ -36,8 +38,8 @@ def find_call_arguments_to_replace(
3638
replaced_args.add(t_arg_name)
3739

3840
# there was at least one replacement
39-
for t_arg_name, _ in replace_nodes:
40-
if t_arg_name not in replaced_args:
41+
for t_arg_name, _, required in replace_nodes:
42+
if t_arg_name not in replaced_args and required:
4143
raise CliCommandInnerException(
4244
"init",
4345
f"The pipeline script {init_script_name} is not explicitly passing the"

0 commit comments

Comments (0)