diff --git a/heat/core/printing.py b/heat/core/printing.py
index 660c333e39..49380470dc 100644
--- a/heat/core/printing.py
+++ b/heat/core/printing.py
@@ -210,7 +210,8 @@ def __repr__(dndarray) -> str:
     Returns a printable representation of the passed DNDarray.
     Unlike the __str__ method, which prints a representation targeted at users, this method targets developers by showing key internal parameters of the DNDarray.
     """
-    return f""
+    tensor_string = torch._tensor_str._tensor_str(dndarray.larray, __INDENT + 1)
+    return f"DNDarray(MPI-rank: {dndarray.comm.rank}, Shape: {dndarray.shape}, Split: {dndarray.split}, Local Shape: {dndarray.lshape}, Device: {dndarray.device}, Dtype: {dndarray.dtype.__name__}, Data:\n{' ' * __INDENT} {tensor_string})"
 
 
 def _torch_data(dndarray, summarize) -> DNDarray:
diff --git a/heat/core/tests/test_printing.py b/heat/core/tests/test_printing.py
index fd6e382e2a..9a363d0402 100644
--- a/heat/core/tests/test_printing.py
+++ b/heat/core/tests/test_printing.py
@@ -431,14 +431,36 @@ def test_split_2_above_threshold(self):
         self.assertEqual(comparison, __str)
 
     def test___repr__(self):
-        a = ht.array([1, 2, 3, 4])
+        a = ht.array([1, 2, 3, 4], split=0)
         r = a.__repr__()
+        expect_meta = f"DNDarray(MPI-rank: {a.comm.rank}, Shape: {a.shape}, Split: {a.split}, Local Shape: {a.lshape}, Device: {a.device}, Dtype: {a.dtype.__name__}, Data:"
+        self.assertEqual(r[:r.index('\n')], expect_meta)
+
+        if ht.comm.size == 1:
+            loc_data_str = '1, 2, 3, 4'
+        elif ht.comm.size == 2:
+            loc_data_str = f'{ht.comm.rank*2+1}, {ht.comm.rank*2+2}'
+        elif ht.comm.size == 3:
+            if ht.comm.rank == 0:
+                loc_data_str = '1, 2'
+            else:
+                loc_data_str = f'{ht.comm.rank + 2}'
+        else:
+            if ht.comm.rank < 4:
+                loc_data_str = f'{ht.comm.rank + 1}'
+            else:
+                loc_data_str = ''
+
+        expect = f'{expect_meta}\n [{loc_data_str}])'
+
         self.assertEqual(
             r,
-            f"",
+            expect,
         )
+
+
 class TestPrintingGPU(TestCase):
     def test_print_GPU(self):
         # this test case also includes GPU now, checking the output
         # is not done; only test whether the routine itself works...