Commit 65a72ef

[CodeStyle] Change python to pycon - part6 (#77608)
1 parent 711589a commit 65a72ef

4 files changed, +50 -51 lines

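For context: `pycon` is the Pygments/Sphinx lexer for interactive Python console sessions (lines prefixed with `>>>` and `...`, optionally followed by printed output), while `python` is meant for plain source code. Paddle's docstring examples are written doctest-style, so `pycon` is the better fit. A minimal sketch of the directive as it is used after this change (illustrative only, not copied from any single file below):

.. code-block:: pycon

    >>> import paddle
    >>> x = paddle.to_tensor([1, 2, 3])

Most hunks below change only the lexer name; a few also clean up spacing inside the example code (for instance `[2,2]` becomes `[2, 2]`).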

python/paddle/pir/core.py

Lines changed: 6 additions & 6 deletions
@@ -171,7 +171,7 @@ def default_startup_program():
    Returns type:

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
@@ -202,7 +202,7 @@ def default_main_program():
        Program: A ``Program`` which holding the descriptions of OPs and tensors in the network.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
@@ -213,7 +213,7 @@ def default_main_program():
            >>> out = paddle.add(x, y)

            >>> # print the number of blocks in the program, 1 in this case
-            >>> print(paddle.static.default_main_program().num_blocks) # 1
+            >>> print(paddle.static.default_main_program().num_blocks) # 1
            >>> # print the default_main_program
            >>> print(paddle.static.default_main_program())
    """
@@ -275,7 +275,7 @@ def program_guard(main_program, startup_program=None):
            Default: None.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon
            :name: code-example-1

            >>> import paddle
@@ -291,7 +291,7 @@ def program_guard(main_program, startup_program=None):
        to construct either of startup program or main program.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon
            :name: code-example-2

            >>> import paddle
@@ -556,7 +556,7 @@ def set_state_dict(program, state_dict, scope=None):
        None

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> import paddle.static as static
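The hunks above touch the docstrings of `default_startup_program`, `default_main_program`, `program_guard`, and `set_state_dict`. As orientation for readers skimming the diff, a typical static-graph snippet of the kind these docstrings demonstrate might look like the following; this is a minimal sketch assembled from calls visible in the hunks, not a copy of any one docstring:

.. code-block:: pycon

    >>> import paddle
    >>> paddle.enable_static()
    >>> main_prog = paddle.static.Program()
    >>> startup_prog = paddle.static.Program()
    >>> with paddle.static.program_guard(main_prog, startup_prog):
    ...     x = paddle.static.data(name='x', shape=[2, 2], dtype='float32')
    ...     y = paddle.static.data(name='y', shape=[2, 2], dtype='float32')
    ...     out = paddle.add(x, y)
    >>> print(paddle.static.default_main_program().num_blocks) # 1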

python/paddle/pir/math_op_patch.py

Lines changed: 23 additions & 24 deletions
@@ -193,12 +193,12 @@ def cuda(self, device_id=None, blocking=True):
    Examples:
        In Static Graph Mode:

-            .. code-block:: python
+            .. code-block:: pycon

                >>> import paddle
                >>> paddle.enable_static()

-                >>> x = paddle.static.data(name="x", shape=[2,2], dtype='float32')
+                >>> x = paddle.static.data(name="x", shape=[2, 2], dtype='float32')
                >>> y = x.cpu()
                >>> z = y.cuda()
    """
@@ -284,7 +284,7 @@ def _ndim(self):
        the dimension

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
@@ -306,7 +306,7 @@ def ndimension(self):
        the dimension

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
@@ -328,7 +328,7 @@ def dim(self):
        the dimension

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
@@ -418,17 +418,16 @@ def astype(self, dtype):
    Examples:
        In Static Graph Mode:

-            .. code-block:: python
+            .. code-block:: pycon

                >>> import paddle
                >>> paddle.enable_static()
                >>> startup_prog = paddle.static.Program()
                >>> main_prog = paddle.static.Program()
                >>> with paddle.static.program_guard(startup_prog, main_prog):
-                ...     original_value = paddle.static.data(name = "new_value", shape=[2,2], dtype='float32')
+                ...     original_value = paddle.static.data(name="new_value", shape=[2, 2], dtype='float32')
                ...     new_value = original_value.astype('int64')
                ...     print(f"new value's dtype is: {new_value.dtype}")
-                ...
                new Tensor's dtype is: paddle.int64

    """
@@ -628,7 +627,7 @@ def _T_(self):
        If `n` is the dimensions of `x` , `x.T` is equivalent to `x.transpose([n-1, n-2, ..., 0])`.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> paddle.enable_static()
@@ -657,7 +656,7 @@ def _mT_(self):
        If `n` is the dimensions of `x` , `x.mT` is equivalent to `x.transpose([0, 1, ..., n-1, n-2])`.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> paddle.enable_static()
@@ -697,7 +696,7 @@ def _new_full_(
        By default, the returned Tensor has the same dtype and place as this tensor.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> paddle.enable_static()
@@ -744,7 +743,7 @@ def _new_empty_(
        By default, the returned Tensor has the same dtype and place as this tensor.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> paddle.enable_static()
@@ -790,7 +789,7 @@ def _new_ones_(
        By default, the returned Tensor has the same dtype and place as this tensor.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> paddle.enable_static()
@@ -837,7 +836,7 @@ def _new_zeros_(
        By default, the returned Tensor has the same dtype and place as this tensor.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> paddle.enable_static()
@@ -984,7 +983,7 @@ def clone(self):
        Tensor, The cloned Tensor.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
@@ -1011,7 +1010,7 @@ def clear_gradient(self):
    Returns: None

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> import numpy as np
@@ -1191,10 +1190,10 @@ def to(self, *args, **kwargs):
        Tensor: self

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
-            >>> x = paddle.to_tensor([1,2,3])
+            >>> x = paddle.to_tensor([1, 2, 3])
            >>> print(x)
            Tensor(shape=[3], dtype=int64, place=Place(gpu:0), stop_gradient=True,
                [1, 2, 3])
@@ -1211,7 +1210,7 @@ def to(self, *args, **kwargs):
            >>> print(x)
            Tensor(shape=[3], dtype=int16, place=Place(gpu:0), stop_gradient=True,
                [1, 2, 3])
-            >>> y = paddle.to_tensor([4,5,6])
+            >>> y = paddle.to_tensor([4, 5, 6])
            >>> y
            Tensor(shape=[3], dtype=int64, place=Place(gpu:0), stop_gradient=True,
                [4, 5, 6])
@@ -1373,7 +1372,7 @@ def numpy(self):
    Returns type:
        ndarray: dtype is same as current Variable
    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> import paddle.base as base
@@ -1402,7 +1401,7 @@ def tolist(self):
        list: Elements have the same dtype as current Variable

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> import paddle.base as base
@@ -1432,7 +1431,7 @@ def requires_grad(self) -> bool:
        Setting requires_grad=True is equivalent to setting stop_gradient=False.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
            >>> x = paddle.randn([2, 3])
@@ -1477,10 +1476,10 @@ def itemsize(self) -> int:
        Returns the number of bytes allocated on the machine for a single element of the Tensor.

    Examples:
-        .. code-block:: python
+        .. code-block:: pycon

            >>> import paddle
-            >>> x = paddle.randn((2,3),dtype=paddle.float64)
+            >>> x = paddle.randn((2, 3), dtype=paddle.float64)
            >>> x.itemsize
            8
    """

python/paddle/reader/__init__.py

Lines changed: 7 additions & 5 deletions
@@ -31,7 +31,7 @@
items. It can be any function with no parameter that creates a iterable
(anything can be used in :code:`for x in iterable`)\:

-.. code-block:: python
+.. code-block:: pycon

    >>> iterable = data_reader()
@@ -43,22 +43,24 @@

An example implementation for single item data reader creator:

-.. code-block:: python
+.. code-block:: pycon

    >>> def reader_creator_random_image(width, height):
    ...     def reader():
    ...         while True:
-    ...             yield numpy.random.uniform(-1, 1, size=width*height)
+    ...             yield numpy.random.uniform(-1, 1, size=width * height)
+    ...
    ...     return reader

An example implementation for multiple item data reader creator:

-.. code-block:: python
+.. code-block:: pycon

    >>> def reader_creator_random_image_and_label(width, height, label):
    ...     def reader():
    ...         while True:
-    ...             yield numpy.random.uniform(-1, 1, size=width*height), label
+    ...             yield numpy.random.uniform(-1, 1, size=width * height), label
+    ...
    ...     return reader

"""
