12 changes: 6 additions & 6 deletions python/paddle/pir/core.py
@@ -171,7 +171,7 @@ def default_startup_program():
Returns type:

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle

@@ -202,7 +202,7 @@ def default_main_program():
Program: A ``Program`` which holds the descriptions of OPs and tensors in the network.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle

@@ -213,7 +213,7 @@ def default_main_program():
>>> out = paddle.add(x, y)

>>> # print the number of blocks in the program, 1 in this case
>>> print(paddle.static.default_main_program().num_blocks) # 1
>>> print(paddle.static.default_main_program().num_blocks) # 1
>>> # print the default_main_program
>>> print(paddle.static.default_main_program())
"""
@@ -275,7 +275,7 @@ def program_guard(main_program, startup_program=None):
Default: None.

Examples:
.. code-block:: python
.. code-block:: pycon
:name: code-example-1

>>> import paddle
@@ -291,7 +291,7 @@ def program_guard(main_program, startup_program=None):
to construct either the startup program or the main program.

Examples:
.. code-block:: python
.. code-block:: pycon
:name: code-example-2

>>> import paddle
@@ -556,7 +556,7 @@ def set_state_dict(program, state_dict, scope=None):
None

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> import paddle.static as static
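A note on the recurring change in this file: ``pycon`` is the Pygments lexer for interactive Python console sessions, so examples written with ``>>>`` prompts and inline expected output are highlighted as a console transcript rather than as ordinary Python source. A minimal sketch of the new directive form, reusing the ``num_blocks`` example already shown above:

    .. code-block:: pycon

        >>> import paddle
        >>> paddle.enable_static()
        >>> # a fresh default main program contains a single block
        >>> print(paddle.static.default_main_program().num_blocks)
        1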
47 changes: 23 additions & 24 deletions python/paddle/pir/math_op_patch.py
@@ -193,12 +193,12 @@ def cuda(self, device_id=None, blocking=True):
Examples:
In Static Graph Mode:

.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()

>>> x = paddle.static.data(name="x", shape=[2,2], dtype='float32')
>>> x = paddle.static.data(name="x", shape=[2, 2], dtype='float32')
>>> y = x.cpu()
>>> z = y.cuda()
"""
@@ -284,7 +284,7 @@ def _ndim(self):
the dimension

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle

@@ -306,7 +306,7 @@ def ndimension(self):
the dimension

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle

@@ -328,7 +328,7 @@ def dim(self):
the dimension

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle

@@ -418,17 +418,16 @@ def astype(self, dtype):
Examples:
In Static Graph Mode:

.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
>>> startup_prog = paddle.static.Program()
>>> main_prog = paddle.static.Program()
>>> with paddle.static.program_guard(startup_prog, main_prog):
... original_value = paddle.static.data(name = "new_value", shape=[2,2], dtype='float32')
... original_value = paddle.static.data(name="new_value", shape=[2, 2], dtype='float32')
... new_value = original_value.astype('int64')
... print(f"new value's dtype is: {new_value.dtype}")
...
new value's dtype is: paddle.int64

"""
@@ -628,7 +627,7 @@ def _T_(self):
If `n` is the number of dimensions of `x`, `x.T` is equivalent to `x.transpose([n-1, n-2, ..., 0])`.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
@@ -657,7 +656,7 @@ def _mT_(self):
If `n` is the number of dimensions of `x`, `x.mT` is equivalent to `x.transpose([0, 1, ..., n-1, n-2])`.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
@@ -697,7 +696,7 @@ def _new_full_(
By default, the returned Tensor has the same dtype and place as this tensor.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
@@ -744,7 +743,7 @@ def _new_empty_(
By default, the returned Tensor has the same dtype and place as this tensor.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
@@ -790,7 +789,7 @@ def _new_ones_(
By default, the returned Tensor has the same dtype and place as this tensor.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
@@ -837,7 +836,7 @@ def _new_zeros_(
By default, the returned Tensor has the same dtype and place as this tensor.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> paddle.enable_static()
@@ -984,7 +983,7 @@ def clone(self):
Tensor, The cloned Tensor.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle

@@ -1011,7 +1010,7 @@ def clear_gradient(self):
Returns: None

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> import numpy as np
@@ -1191,10 +1190,10 @@ def to(self, *args, **kwargs):
Tensor: self

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> x = paddle.to_tensor([1,2,3])
>>> x = paddle.to_tensor([1, 2, 3])
>>> print(x)
Tensor(shape=[3], dtype=int64, place=Place(gpu:0), stop_gradient=True,
[1, 2, 3])
@@ -1211,7 +1210,7 @@ def to(self, *args, **kwargs):
>>> print(x)
Tensor(shape=[3], dtype=int16, place=Place(gpu:0), stop_gradient=True,
[1, 2, 3])
>>> y = paddle.to_tensor([4,5,6])
>>> y = paddle.to_tensor([4, 5, 6])
>>> y
Tensor(shape=[3], dtype=int64, place=Place(gpu:0), stop_gradient=True,
[4, 5, 6])
@@ -1373,7 +1372,7 @@ def numpy(self):
Returns type:
ndarray: the dtype is the same as that of the current Variable
Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> import paddle.base as base
@@ -1402,7 +1401,7 @@ def tolist(self):
list: Elements have the same dtype as current Variable

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> import paddle.base as base
@@ -1432,7 +1431,7 @@ def requires_grad(self) -> bool:
Setting requires_grad=True is equivalent to setting stop_gradient=False.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> x = paddle.randn([2, 3])
@@ -1477,10 +1476,10 @@ def itemsize(self) -> int:
Returns the number of bytes allocated on the machine for a single element of the Tensor.

Examples:
.. code-block:: python
.. code-block:: pycon

>>> import paddle
>>> x = paddle.randn((2,3),dtype=paddle.float64)
>>> x = paddle.randn((2, 3), dtype=paddle.float64)
>>> x.itemsize
8
"""
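One consolidated usage note for the truncated ``_ndim``, ``ndimension`` and ``dim`` hunks above: all three report the rank of a value. A minimal static-graph sketch, assuming the patched members are exposed as the ``ndim`` property and the ``dim()`` / ``ndimension()`` methods on the result of ``paddle.static.data``:

    .. code-block:: pycon

        >>> import paddle
        >>> paddle.enable_static()
        >>> x = paddle.static.data(name="x", shape=[3, 2, 1], dtype='float32')
        >>> print(x.ndim)          # property form
        3
        >>> print(x.dim())         # method form
        3
        >>> print(x.ndimension())  # alias of dim()
        3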
10 changes: 6 additions & 4 deletions python/paddle/reader/__init__.py
@@ -43,22 +43,24 @@

An example implementation of a single-item data reader creator:

.. code-block:: python
.. code-block:: pycon

>>> def reader_creator_random_image(width, height):
... def reader():
... while True:
... yield numpy.random.uniform(-1, 1, size=width*height)
... yield numpy.random.uniform(-1, 1, size=width * height)
...
... return reader

An example implementation of a multi-item data reader creator:

.. code-block:: python
.. code-block:: pycon

>>> def reader_creator_random_image_and_label(width, height, label):
... def reader():
... while True:
... yield numpy.random.uniform(-1, 1, size=width*height), label
... yield numpy.random.uniform(-1, 1, size=width * height), label
...
... return reader

"""