Skip to content

Commit fbf0a35

Browse files
authored
Merge pull request #15771 from sneaxiy/release/1.3-fix-api-arg0
Fix doc
2 parents e80596e + 3eaab3f commit fbf0a35

File tree

3 files changed

+100
-20
lines changed

3 files changed

+100
-20
lines changed

paddle/fluid/API.spec

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -473,11 +473,11 @@ paddle.fluid.LoDTensor.has_valid_recursive_sequence_lengths has_valid_recursive_
473473
paddle.fluid.LoDTensor.lod lod(self: paddle.fluid.core.LoDTensor) -> List[List[int]]
474474
paddle.fluid.LoDTensor.recursive_sequence_lengths recursive_sequence_lengths(self: paddle.fluid.core.LoDTensor) -> List[List[int]]
475475
paddle.fluid.LoDTensor.set 1. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[float32], arg1: paddle::platform::CPUPlace) -> None 2. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int32], arg1: paddle::platform::CPUPlace) -> None 3. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[float64], arg1: paddle::platform::CPUPlace) -> None 4. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int64], arg1: paddle::platform::CPUPlace) -> None 5. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[bool], arg1: paddle::platform::CPUPlace) -> None 6. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[uint16], arg1: paddle::platform::CPUPlace) -> None 7. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[uint8], arg1: paddle::platform::CPUPlace) -> None 8. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int8], arg1: paddle::platform::CPUPlace) -> None 9. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[float32], arg1: paddle::platform::CUDAPlace) -> None 10. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int32], arg1: paddle::platform::CUDAPlace) -> None 11. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[float64], arg1: paddle::platform::CUDAPlace) -> None 12. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int64], arg1: paddle::platform::CUDAPlace) -> None 13. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[bool], arg1: paddle::platform::CUDAPlace) -> None 14. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[uint16], arg1: paddle::platform::CUDAPlace) -> None 15. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[uint8], arg1: paddle::platform::CUDAPlace) -> None 16. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int8], arg1: paddle::platform::CUDAPlace) -> None 17. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[float32], arg1: paddle::platform::CUDAPinnedPlace) -> None 18. 
set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int32], arg1: paddle::platform::CUDAPinnedPlace) -> None 19. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[float64], arg1: paddle::platform::CUDAPinnedPlace) -> None 20. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int64], arg1: paddle::platform::CUDAPinnedPlace) -> None 21. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[bool], arg1: paddle::platform::CUDAPinnedPlace) -> None 22. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[uint16], arg1: paddle::platform::CUDAPinnedPlace) -> None 23. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[uint8], arg1: paddle::platform::CUDAPinnedPlace) -> None 24. set(self: paddle.fluid.core.Tensor, arg0: numpy.ndarray[int8], arg1: paddle::platform::CUDAPinnedPlace) -> None
476-
paddle.fluid.LoDTensor.set_lod set_lod(self: paddle.fluid.core.LoDTensor, arg0: List[List[int]]) -> None
477-
paddle.fluid.LoDTensor.set_recursive_sequence_lengths set_recursive_sequence_lengths(self: paddle.fluid.core.LoDTensor, arg0: List[List[int]]) -> None
476+
paddle.fluid.LoDTensor.set_lod set_lod(self: paddle.fluid.core.LoDTensor, lod: List[List[int]]) -> None
477+
paddle.fluid.LoDTensor.set_recursive_sequence_lengths set_recursive_sequence_lengths(self: paddle.fluid.core.LoDTensor, recursive_sequence_lengths: List[List[int]]) -> None
478478
paddle.fluid.LoDTensor.shape shape(self: paddle.fluid.core.Tensor) -> List[int]
479479
paddle.fluid.LoDTensorArray.__init__ __init__(self: paddle.fluid.core.LoDTensorArray) -> None
480-
paddle.fluid.LoDTensorArray.append append(self: paddle.fluid.core.LoDTensorArray, arg0: paddle.fluid.core.LoDTensor) -> None
480+
paddle.fluid.LoDTensorArray.append append(self: paddle.fluid.core.LoDTensorArray, tensor: paddle.fluid.core.LoDTensor) -> None
481481
paddle.fluid.CPUPlace.__init__ __init__(self: paddle.fluid.core.CPUPlace) -> None
482482
paddle.fluid.CUDAPlace.__init__ __init__(self: paddle.fluid.core.CUDAPlace, arg0: int) -> None
483483
paddle.fluid.CUDAPinnedPlace.__init__ __init__(self: paddle.fluid.core.CUDAPinnedPlace) -> None

paddle/fluid/pybind/pybind.cc

Lines changed: 93 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -372,7 +372,13 @@ PYBIND11_MODULE(core, m) {
372372
PADDLE_ENFORCE(CheckLoD(new_lod, vectorize(self.dims()).front()),
373373
"the provided lod info is invalid");
374374
self.set_lod(new_lod);
375-
})
375+
},
376+
py::arg("lod"), R"DOC(
377+
Set LoD of the LoDTensor.
378+
379+
Args:
380+
lod (List[List[int]]): the lod to be set.
381+
)DOC")
376382
.def("set_recursive_sequence_lengths",
377383
[](LoDTensor &self, const std::vector<std::vector<size_t>>
378384
&recursive_sequence_lengths) {
@@ -388,7 +394,17 @@ PYBIND11_MODULE(core, m) {
388394
CheckLoD(new_offset_lod, vectorize(self.dims()).front()),
389395
"the provided recursive_sequence_lengths info is invalid");
390396
self.set_lod(new_offset_lod);
391-
})
397+
},
398+
py::arg("recursive_sequence_lengths"), R"DOC(
399+
Set LoD of the LoDTensor according to recursive sequence length.
400+
401+
For example, if recursive_sequence_lengths=[[2, 3]], meaning that
402+
there are two sequences with length 2 and 3 respectively, the
403+
corresponding lod would be [[0, 2, 2+3]], i.e., [[0, 2, 5]].
404+
405+
Args:
406+
recursive_sequence_lengths (List[List[int]]): sequence lengths.
407+
)DOC")
392408
.def("lod",
393409
[](LoDTensor &self) -> std::vector<std::vector<size_t>> {
394410
// output the offset-based lod info
@@ -397,7 +413,13 @@ PYBIND11_MODULE(core, m) {
397413
new_lod.reserve(lod.size());
398414
std::copy(lod.begin(), lod.end(), std::back_inserter(new_lod));
399415
return new_lod;
400-
})
416+
},
417+
R"DOC(
418+
Return the LoD of the LoDTensor.
419+
420+
Returns:
421+
out (List[List[int]]): the lod of the LoDTensor.
422+
)DOC")
401423
// Set above comments of set_lod.
402424
.def("recursive_sequence_lengths",
403425
[](LoDTensor &self) -> std::vector<std::vector<size_t>> {
@@ -407,12 +429,25 @@ PYBIND11_MODULE(core, m) {
407429
new_lod.reserve(lod.size());
408430
std::copy(lod.begin(), lod.end(), std::back_inserter(new_lod));
409431
return new_lod;
410-
})
411-
.def("has_valid_recursive_sequence_lengths", [](LoDTensor &self) -> bool {
412-
// Check that the lod info is valid and match the outermost
413-
// dimension of the LoDTensor data
414-
return CheckLoD(self.lod(), vectorize(self.dims()).front());
415-
});
432+
},
433+
R"DOC(
434+
Return the sequence length of the LoDTensor corresponding to LoD.
435+
436+
Returns:
437+
out (List[List[int]]): the sequence lengths.
438+
)DOC")
439+
.def("has_valid_recursive_sequence_lengths",
440+
[](LoDTensor &self) -> bool {
441+
// Check that the lod info is valid and match the outermost
442+
// dimension of the LoDTensor data
443+
return CheckLoD(self.lod(), vectorize(self.dims()).front());
444+
},
445+
R"DOC(
446+
Check whether the lod of the LoDTensor is valid.
447+
448+
Returns:
449+
out (bool): whether the lod is valid.
450+
)DOC");
416451

417452
py::class_<SelectedRows>(m, "SelectedRows")
418453
.def("__init__",
@@ -548,18 +583,58 @@ All parameter, weight, gradient are variables in Paddle.
548583
[](Scope &self, const std::string &name) -> Variable * {
549584
return self.Var(name);
550585
},
586+
py::arg("name"),
587+
R"DOC(
588+
Find or create variable named :code:`name` in the current scope.
589+
590+
If the variable named :code:`name` does not exist in the
591+
current scope, the variable would be created. Otherwise,
592+
return the existing variable.
593+
594+
Args:
595+
name (str): the variable name.
596+
597+
Returns:
598+
out (core.Variable): the found or created variable.
599+
)DOC",
600+
py::return_value_policy::reference)
601+
.def("find_var", &Scope::FindVar, py::arg("name"),
602+
R"DOC(
603+
Find variable named :code:`name` in the current scope or
604+
its parent scope. Return None if not found.
605+
606+
Args:
607+
name (str): the variable name.
608+
609+
Returns:
610+
out (core.Variable|None): the found variable or None.
611+
)DOC",
551612
py::return_value_policy::reference)
552-
.def("find_var", &Scope::FindVar, py::return_value_policy::reference)
553613
.def("new_scope", [](Scope &self) -> Scope * { return &self.NewScope(); },
614+
R"DOC(
615+
Create a new sub-scope of the current scope.
616+
617+
Returns:
618+
out (core._Scope): the created sub-scope.
619+
)DOC",
554620
py::return_value_policy::reference)
555-
.def("drop_kids", &Scope::DropKids);
621+
.def("drop_kids", &Scope::DropKids,
622+
R"DOC(
623+
Delete all sub-scopes of the current scope.
624+
)DOC");
556625

557626
m.def("Scope",
558627
[]() -> Scope * {
559628
auto *s = new Scope();
560629
ScopePool::Instance().Insert(std::unique_ptr<Scope>(s));
561630
return s;
562631
},
632+
R"DOC(
633+
Create a new scope.
634+
635+
Returns:
636+
out (core._Scope): the created scope.
637+
)DOC",
563638
py::return_value_policy::reference);
564639

565640
//! @note: Be careful! PyBind will return std::string as an unicode, not
@@ -782,11 +857,13 @@ All parameter, weight, gradient are variables in Paddle.
782857
self[i].ShareDataWith(t);
783858
self[i].set_lod(t.lod());
784859
})
785-
.def("append", [](LoDTensorArray &self, const LoDTensor &t) {
786-
self.emplace_back();
787-
self.back().ShareDataWith(t);
788-
self.back().set_lod(t.lod());
789-
});
860+
.def("append",
861+
[](LoDTensorArray &self, const LoDTensor &t) {
862+
self.emplace_back();
863+
self.back().ShareDataWith(t);
864+
self.back().set_lod(t.lod());
865+
},
866+
py::arg("tensor"), "Append a LoDTensor to LoDTensorArray.");
790867

791868
m.def("IsInplace",
792869
[](std::string op) -> bool { return operators::IsInplace(op); });

python/paddle/fluid/layers/io.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,10 @@ def data(name,
5656
5757
Args:
5858
name(str): The name/alias of the function
59-
shape(list): Tuple declaring the shape.
59+
shape(list): Tuple declaring the shape. If :code:`append_batch_size` is
60+
True and there is no -1 inside :code:`shape`, it should be
61+
considered as the shape of the each sample. Otherwise, it
62+
should be considered as the shape of the batched data.
6063
append_batch_size(bool):
6164
1. If true, it prepends -1 to the shape.
6265
For example if shape=[1], the resulting shape is [-1, 1].

0 commit comments

Comments
 (0)