Commit 5110ee9

Add more memory tests, but skip them in the main test flow
1 parent 38c5ebf commit 5110ee9

File tree

1 file changed: +89 -23 lines changed

test/test_xarray_io.py

Lines changed: 89 additions & 23 deletions
@@ -14,34 +14,34 @@
 import flopy4.xarray_jinja.filters as filters

 test_combinations = [
-    # (1_000, 100),
-    # (1_000_000, 1_000),
+    (1_000, 100),
+    (1_000_000, 1_000),
     (1_000_000, 10_000),
     # (10_000_000, 1_000),
-    (10_000_000, 10_000),
+    # (10_000_000, 10_000),
     # (100_000_000, 10_000),
-    (100_000_000, 100_000),
-    (100_000_000, 1_000_000),  # 1_000_000 is about 8MB of chunks.
-    (
-        100_000_000,
-        10_000_000,
-    ),  # 10_000_000 is about 80MB of chunks. Copilot advised 100MB.
+    # (100_000_000, 100_000),
+    # (100_000_000, 1_000_000),  # 1_000_000 is about 8MB of chunks.
+    # (
+    #     1_000_000_000,
+    #     10_000_000,
+    # ),  # 10_000_000 is about 80MB of chunks. Copilot advised 100MB.
 ]


 @pytest.fixture(scope="module")
 def memory_file():
     with open("memory.md", "w") as f:
-        f.write("| test | args | memory (MB) | \n")
-        f.write("| --- | --- | --- | \n")
+        f.write("| test | args | memory (MB) |\n")
+        f.write("| --- | --- | --- |\n")
         yield f


 @pytest.fixture(scope="module")
 def time_file():
     with open("times.md", "w") as f:
-        f.write("| test | args | time (s) | \n")
-        f.write("| --- | --- | --- | \n")
+        f.write("| test | args | time (s) |\n")
+        f.write("| --- | --- | --- |\n")
         yield f

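For context on the chunk-size comments above: da.arange produces int64 values (8 bytes each on most platforms), so chunk memory is element count times item size, i.e. 1_000_000 elements is about 8 MB and 10_000_000 about 80 MB. A quick sketch to verify, using only dask:

import dask.array as da

# 100M elements split into 1M-element chunks, as in the commented-out combinations.
arr = da.arange(0, 100_000_000, 1).rechunk(1_000_000)
print(arr.chunksize)                          # (1_000_000,)
print(arr.dtype.itemsize * arr.chunksize[0])  # 8_000_000 bytes, i.e. ~8 MB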

@@ -67,16 +67,17 @@ def mem_check_function(
 ) -> None:
     tracemalloc.start()
     func(*args)
-    snapshot = tracemalloc.take_snapshot()
+    size, peak = tracemalloc.get_traced_memory()
     tracemalloc.stop()

-    stats_formatted = "".join(
-        [f"<li>{s}</li>" for s in snapshot.statistics("lineno")[:10]]
-    )
     name = getattr(func, "__name__", "unknown")
-    memory_file.write(
-        f"| {name} | {print_args} | <ol>{stats_formatted}</ol> |\n"
-    )
+    # stats_formatted = "".join(
+    #     [f"<li>{s}</li>" for s in snapshot.statistics("lineno")[:10]]
+    # )
+    # memory_file.write(
+    #     f"| {name} | {print_args} | <ol>{stats_formatted}</ol> |\n"
+    # )
+    memory_file.write(f"| {name} | {print_args} | {peak / 10**6} MB |\n")


 def create_and_write_jinja(file_path, data: xr.DataArray):
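
The change above swaps tracemalloc.take_snapshot(), which collects per-line allocation statistics and gets expensive on large runs, for tracemalloc.get_traced_memory(), which returns a cheap (current, peak) pair of byte counts. A minimal standalone sketch of the new pattern (the list allocation is a stand-in for the real workload):

import tracemalloc

tracemalloc.start()
buf = [0] * 1_000_000  # stand-in for func(*args)
size, peak = tracemalloc.get_traced_memory()  # (current bytes, peak bytes)
tracemalloc.stop()
print(f"| example | n=1_000_000 | {peak / 10**6} MB |")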
@@ -91,13 +92,14 @@ def create_and_write_jinja(file_path, data: xr.DataArray):
     generator = env.get_template("disu_template.disu.jinja").generate(
         data=data
     )
-    with np.printoptions(precision=4, linewidth=sys.maxsize):
+    with np.printoptions(
+        precision=4, linewidth=sys.maxsize, threshold=sys.maxsize
+    ):
         with open(file_path, "w") as f:
             f.writelines(generator)


 @pytest.mark.parametrize("max_size,chunks", test_combinations)
-@pytest.mark.skip("Too slow for large data")
 @pytest.mark.timing
 def test_xarray_to_text_jinja(tmp_path, max_size, chunks, time_file):
     data = xr.DataArray(da.arange(0, max_size, 1), dims="x")
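
The threshold=sys.maxsize addition matters because numpy summarizes any array longer than threshold (1000 by default) with an ellipsis, which would silently truncate the data written to disk. An illustration:

import sys
import numpy as np

arr = np.arange(2000)
print(arr)  # summarized: [   0    1    2 ... 1997 1998 1999]
with np.printoptions(
    precision=4, linewidth=sys.maxsize, threshold=sys.maxsize
):
    print(arr)  # every element, printed on a single line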
@@ -161,7 +163,6 @@ def create_and_write_np_savetxt(file_path, data: xr.DataArray):


 @pytest.mark.parametrize("max_size,chunks", test_combinations)
-@pytest.mark.skip("Too slow for large data")
 @pytest.mark.timing
 def test_xarray_to_text_np_savetxt(tmp_path, max_size, chunks, time_file):
     data = xr.DataArray(da.arange(0, max_size, 1), dims="x")
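
With the two unconditional skips removed, the jinja and np.savetxt timing tests run again whenever the timing marker is selected. The equivalent of "pytest -m timing" from Python (assuming the timing and memory markers are registered in the project's pytest configuration, which this diff does not show):

import pytest

# Run only the tests tagged @pytest.mark.timing in this module.
pytest.main(["-m", "timing", "test/test_xarray_io.py"])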
@@ -221,6 +222,7 @@ def test_xarray_to_text_extras(tmp_path, max_size, chunks, time_file):

 @pytest.mark.parametrize("max_size,chunks", test_combinations)
 @pytest.mark.memory
+@pytest.mark.skip("Memory tests take a long time to run")
 def test_xarray_to_text_extras_mem(tmp_path, max_size, chunks, memory_file):
     data = xr.DataArray(da.arange(0, max_size, 1), dims="x")
     data = data.chunk(chunks)
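
Note that @pytest.mark.skip is unconditional: these memory tests stay skipped even under "pytest -m memory". A hypothetical variant (not part of this commit) would gate the skip on an environment variable, so the tests can be opted into without editing the file:

import os
import pytest

@pytest.mark.memory
@pytest.mark.skipif(
    os.environ.get("RUN_MEMORY_TESTS") != "1",  # hypothetical opt-in switch
    reason="Memory tests take a long time to run",
)
def test_example_mem():
    ...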
@@ -236,3 +238,67 @@ def test_xarray_to_text_extras_mem(tmp_path, max_size, chunks, memory_file):
     with open(file_path, "r") as f:
         output = f.readlines()
     assert len(output) == 3
+
+
+@pytest.mark.parametrize("max_size,chunks", test_combinations)
+@pytest.mark.memory
+@pytest.mark.skip("Memory tests take a long time to run")
+def test_xarray_to_text_pandas_mem(tmp_path, max_size, chunks, memory_file):
+    data = xr.DataArray(da.arange(0, max_size, 1), dims="x")
+    data = data.chunk(chunks)
+    file_path = tmp_path / "test_xarray_to_text_pandas.disu"
+
+    mem_check_function(
+        create_and_write_pandas,
+        (file_path, data),
+        memory_file,
+        print_args={"max_size": max_size, "chunks": chunks},
+    )
+
+    with open(file_path, "r") as f:
+        output = f.readlines()
+    assert len(output) == 3
+
+
+@pytest.mark.parametrize("max_size,chunks", test_combinations)
+@pytest.mark.memory
+@pytest.mark.skip("Memory tests take a long time to run")
+def test_xarray_to_text_np_savetext_mem(
+    tmp_path, max_size, chunks, memory_file
+):
+    data = xr.DataArray(da.arange(0, max_size, 1), dims="x")
+    data = data.chunk(chunks)
+    file_path = tmp_path / "test_xarray_to_text_np_savetext.disu"
+
+    mem_check_function(
+        create_and_write_np_savetxt,
+        (file_path, data),
+        memory_file,
+        print_args={"max_size": max_size, "chunks": chunks},
+    )
+
+    with open(file_path, "r") as f:
+        output = f.readlines()
+    assert len(output) == 3
+
+
+@pytest.mark.parametrize("max_size,chunks", test_combinations)
+@pytest.mark.memory
+@pytest.mark.skip("Memory tests take a long time to run")
+def test_xarray_to_text_jinja_mem(tmp_path, max_size, chunks, memory_file):
+    data = xr.DataArray(da.arange(0, max_size, 1), dims="x")
+    data = data.chunk(chunks)
+    file_path = tmp_path / "test_xarray_to_text_jinja.disu"
+
+    mem_check_function(
+        create_and_write_jinja,
+        (file_path, data),
+        memory_file,
+        print_args={"max_size": max_size, "chunks": chunks},
+    )
+
+    with open(file_path, "r") as f:
+        output = f.readlines()
+    assert (
+        len(output) == 2 + max_size / chunks
+    )  # begin + end + lines of data
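
The final assertion counts one output line per dask chunk plus the template's begin and end lines (per the inline comment). Every active pair in test_combinations divides evenly, so the float result of max_size / chunks compares cleanly against the integer line count. Worked through for one parametrization:

# e.g. max_size=1_000_000, chunks=10_000:
# 1_000_000 / 10_000 = 100 data lines, plus begin and end = 102 lines
max_size, chunks = 1_000_000, 10_000
assert 2 + max_size / chunks == 102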
