@@ -372,7 +372,13 @@ PYBIND11_MODULE(core, m) {
             PADDLE_ENFORCE(CheckLoD(new_lod, vectorize(self.dims()).front()),
                            "the provided lod info is invalid");
             self.set_lod(new_lod);
-          })
+          },
+          py::arg("lod"), R"DOC(
+           Set LoD of the LoDTensor.
+
+           Args:
+               lod (List[List[int]]): the lod to be set.
+           )DOC")
       .def("set_recursive_sequence_lengths",
            [](LoDTensor &self, const std::vector<std::vector<size_t>>
                                    &recursive_sequence_lengths) {
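From Python, the new `set_lod` binding takes an offset-based LoD. A minimal usage sketch, assuming the module is importable as `paddle.fluid.core` and that `LoDTensor.set` and `CPUPlace` behave as elsewhere in these bindings:

```python
import numpy as np
import paddle.fluid.core as core  # assumed import path for this module

t = core.LoDTensor()
t.set(np.random.rand(5, 3).astype("float32"), core.CPUPlace())
# One LoD level with two sequences covering rows [0, 2) and [2, 5);
# CheckLoD enforces that the last offset matches dims()[0] == 5.
t.set_lod([[0, 2, 5]])
print(t.lod())  # [[0, 2, 5]]
```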
@@ -388,7 +394,17 @@ PYBIND11_MODULE(core, m) {
                 CheckLoD(new_offset_lod, vectorize(self.dims()).front()),
                 "the provided recursive_sequence_lengths info is invalid");
             self.set_lod(new_offset_lod);
-          })
+          },
+          py::arg("recursive_sequence_lengths"), R"DOC(
+           Set LoD of the LoDTensor according to recursive sequence lengths.
+
+           For example, if recursive_sequence_lengths=[[2, 3]], meaning that
+           there are two sequences with lengths 2 and 3 respectively, the
+           corresponding lod would be [[0, 2, 2+3]], i.e., [[0, 2, 5]].
+
+           Args:
+               recursive_sequence_lengths (List[List[int]]): sequence lengths.
+           )DOC")
       .def("lod",
            [](LoDTensor &self) -> std::vector<std::vector<size_t>> {
              // output the offset-based lod info
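The conversion this docstring describes is just a running sum over each level. A pure-Python sketch of the same arithmetic (`lengths_to_offsets` is a hypothetical helper for illustration, not part of the API):

```python
def lengths_to_offsets(recursive_sequence_lengths):
    # Each level of per-sequence lengths becomes a cumulative offset list.
    lod = []
    for level in recursive_sequence_lengths:
        offsets = [0]
        for length in level:
            offsets.append(offsets[-1] + length)
        lod.append(offsets)
    return lod

assert lengths_to_offsets([[2, 3]]) == [[0, 2, 5]]
```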
@@ -397,7 +413,13 @@ PYBIND11_MODULE(core, m) {
             new_lod.reserve(lod.size());
             std::copy(lod.begin(), lod.end(), std::back_inserter(new_lod));
             return new_lod;
-          })
+          },
+          R"DOC(
+           Return the LoD of the LoDTensor.
+
+           Returns:
+               out (List[List[int]]): the lod of the LoDTensor.
+           )DOC")
       // Set above comments of set_lod.
       .def("recursive_sequence_lengths",
            [](LoDTensor &self) -> std::vector<std::vector<size_t>> {
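Both setters feed the same offset-based representation, so `lod()` returns offsets regardless of which setter was used. A sketch under the same import assumptions as above:

```python
import numpy as np
import paddle.fluid.core as core  # assumed import path

t = core.LoDTensor()
t.set(np.zeros((5, 1), dtype="float32"), core.CPUPlace())
t.set_recursive_sequence_lengths([[2, 3]])
print(t.lod())  # [[0, 2, 5]], the offset-based form
```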
@@ -407,12 +429,25 @@ PYBIND11_MODULE(core, m) {
             new_lod.reserve(lod.size());
             std::copy(lod.begin(), lod.end(), std::back_inserter(new_lod));
             return new_lod;
-          })
-      .def("has_valid_recursive_sequence_lengths", [](LoDTensor &self) -> bool {
-        // Check that the lod info is valid and match the outermost
-        // dimension of the LoDTensor data
-        return CheckLoD(self.lod(), vectorize(self.dims()).front());
-      });
+          },
+          R"DOC(
+           Return the sequence lengths of the LoDTensor corresponding to its LoD.
+
+           Returns:
+               out (List[List[int]]): the sequence lengths.
+           )DOC")
+      .def("has_valid_recursive_sequence_lengths",
+           [](LoDTensor &self) -> bool {
+             // Check that the lod info is valid and matches the outermost
+             // dimension of the LoDTensor data
+             return CheckLoD(self.lod(), vectorize(self.dims()).front());
+           },
+           R"DOC(
+           Check whether the lod of the LoDTensor is valid.
+
+           Returns:
+               out (bool): whether the lod is valid.
+           )DOC");
 
   py::class_<SelectedRows>(m, "SelectedRows")
       .def("__init__",
@@ -548,18 +583,58 @@ All parameter, weight, gradient are variables in Paddle.
            [](Scope &self, const std::string &name) -> Variable * {
              return self.Var(name);
            },
+           py::arg("name"),
+           R"DOC(
+           Find or create a variable named :code:`name` in the current scope.
+
+           If the variable named :code:`name` does not exist in the
+           current scope, it will be created. Otherwise, the existing
+           variable is returned.
+
+           Args:
+               name (str): the variable name.
+
+           Returns:
+               out (core.Variable): the found or created variable.
+           )DOC",
+           py::return_value_policy::reference)
+      .def("find_var", &Scope::FindVar, py::arg("name"),
+           R"DOC(
+           Find the variable named :code:`name` in the current scope or
+           its parent scope. Return None if not found.
+
+           Args:
+               name (str): the variable name.
+
+           Returns:
+               out (core.Variable|None): the found variable or None.
+           )DOC",
            py::return_value_policy::reference)
-      .def("find_var", &Scope::FindVar, py::return_value_policy::reference)
       .def("new_scope", [](Scope &self) -> Scope * { return &self.NewScope(); },
+           R"DOC(
+           Create a new sub-scope of the current scope.
+
+           Returns:
+               out (core._Scope): the created sub-scope.
+           )DOC",
            py::return_value_policy::reference)
-      .def("drop_kids", &Scope::DropKids);
+      .def("drop_kids", &Scope::DropKids,
+           R"DOC(
+           Delete all sub-scopes of the current scope.
+           )DOC");
 
   m.def("Scope",
         []() -> Scope * {
           auto *s = new Scope();
           ScopePool::Instance().Insert(std::unique_ptr<Scope>(s));
           return s;
         },
+        R"DOC(
+        Create a new scope.
+
+        Returns:
+            out (core._Scope): the created scope.
+        )DOC",
         py::return_value_policy::reference);
 
   //! @note: Be careful! PyBind will return std::string as an unicode, not
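Taken together, the Scope bindings support the usual find-or-create pattern. A usage sketch, assuming the same `core` import:

```python
import paddle.fluid.core as core  # assumed import path

scope = core.Scope()                  # via the m.def("Scope", ...) factory
scope.var("fc_0.w_0")                 # created on first call
assert scope.find_var("fc_0.w_0") is not None
assert scope.find_var("missing") is None

sub = scope.new_scope()               # a child can see parent variables
assert sub.find_var("fc_0.w_0") is not None
scope.drop_kids()                     # deletes sub and any other children
```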
@@ -782,11 +857,13 @@ All parameter, weight, gradient are variables in Paddle.
             self[i].ShareDataWith(t);
             self[i].set_lod(t.lod());
           })
-      .def("append", [](LoDTensorArray &self, const LoDTensor &t) {
-        self.emplace_back();
-        self.back().ShareDataWith(t);
-        self.back().set_lod(t.lod());
-      });
+      .def("append",
+           [](LoDTensorArray &self, const LoDTensor &t) {
+             self.emplace_back();
+             self.back().ShareDataWith(t);
+             self.back().set_lod(t.lod());
+           },
+           py::arg("tensor"), "Append a LoDTensor to LoDTensorArray.");
 
   m.def("IsInplace",
         [](std::string op) -> bool { return operators::IsInplace(op); });
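A sketch of the reworked `append` from Python; it assumes `LoDTensorArray` is default-constructible and supports `len()`, as its other bindings suggest:

```python
import numpy as np
import paddle.fluid.core as core  # assumed import path

arr = core.LoDTensorArray()
t = core.LoDTensor()
t.set(np.ones((2, 2), dtype="float32"), core.CPUPlace())
t.set_recursive_sequence_lengths([[2]])
arr.append(t)   # the stored element shares data with t and copies its lod
assert len(arr) == 1
```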