Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[xdoctest] reformat example code with google style in 192-197 #55926

Merged
merged 15 commits into from
Aug 25, 2023
22 changes: 11 additions & 11 deletions python/paddle/nn/functional/input.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,11 +72,11 @@ def one_hot(x, num_classes, name=None):
Examples:
.. code-block:: python

import paddle
>>> import paddle
# Correspond to the first example above, where label.shape is 4 and one_hot_label.shape is [4, 4].
label = paddle.to_tensor([1, 1, 3, 0], dtype='int64')
>>> label = paddle.to_tensor([1, 1, 3, 0], dtype='int64')
# label.shape = [4]
one_hot_label = paddle.nn.functional.one_hot(label, num_classes=4)
>>> one_hot_label = paddle.nn.functional.one_hot(label, num_classes=4)
# one_hot_label.shape = [4, 4]
# one_hot_label = [[0., 1., 0., 0.],
# [0., 1., 0., 0.],
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

需要改成 print 的形式 (Translation: this should be changed to the print-output form, i.e. show the result via `>>> print(...)` with expected output, per the xdoctest/Google-style example convention.)

Expand Down Expand Up @@ -166,24 +166,24 @@ def embedding(x, weight, padding_idx=None, sparse=False, name=None):

.. code-block:: python

import paddle
import paddle.nn as nn
>>> import paddle
>>> import paddle.nn as nn

x0 = paddle.arange(3, 6).reshape((3, 1)).astype(paddle.int64)
w0 = paddle.full(shape=(10, 3), fill_value=2).astype(paddle.float32)
>>> x0 = paddle.arange(3, 6).reshape((3, 1)).astype(paddle.int64)
>>> w0 = paddle.full(shape=(10, 3), fill_value=2).astype(paddle.float32)

# x.data = [[3], [4], [5]]
# x.shape = [3, 1]
x = paddle.to_tensor(x0, stop_gradient=False)
>>> x = paddle.to_tensor(x0, stop_gradient=False)

# w.data = [[2. 2. 2.] ... [2. 2. 2.]]
# w.shape = [10, 3]
w = paddle.to_tensor(w0, stop_gradient=False)
>>> w = paddle.to_tensor(w0, stop_gradient=False)

# emb.data = [[[2., 2., 2.]], [[2., 2., 2.]], [[2., 2., 2.]]]
# emb.shape = [3, 1, 3]
emb = nn.functional.embedding(
x=x, weight=w, sparse=True, name="embedding")
>>> emb = nn.functional.embedding(
... x=x, weight=w, sparse=True, name="embedding")

"""
padding_idx = (
Expand Down