 # See the License for the specific language governing permissions and
 # limitations under the License.

-# TODO: Variables: make_channel
-# TODO: Operators: send, close_channel, recv, go, select
 from layers.control_flow import BlockGuard
-from layer_helper import LayerHelper
+from layer_helper import LayerHelper, unique_name
+from layers import fill_constant
 import core
+
 __all__ = [
     'Go',
     'make_channel',
@@ -46,27 +46,35 @@ def construct_go_op(self):
         parent_block = main_program.block(main_program.current_block()
                                           .parent_idx)

+        inner_outputs = set()
         x_name_list = set()
-        out_vars = []
         for op in go_block.ops:
             # Iterate over all operators, get all the inputs
             # and add as input to the Go operator.
             for iname in op.input_names:
                 for in_var_name in op.input(iname):
-                    x_name_list.add(in_var_name)
+                    if in_var_name not in inner_outputs:
+                        x_name_list.add(in_var_name)

-            # Iterate over all operators, get all the outputs
-            # add to the output list of Go operator only if
-            # they exist in the parent block.
             for oname in op.output_names:
                 for out_var_name in op.output(oname):
-                    if out_var_name in parent_block.vars:
-                        out_vars.add(parent_block.var(out_var_name))
+                    inner_outputs.add(out_var_name)
+
+        # Add an output of the block to the output list of the Go operator
+        # only if it also exists in the parent block.
+        out_vars = []
+        for inner_out_name in inner_outputs:
+            if inner_out_name in parent_block.vars:
+                out_vars.append(parent_block.var(inner_out_name))

         parent_block.append_op(
             type='go',
-            inputs={'X': [parent_block.var(x_name) for x_name in x_name_list]},
-            outputs={'Out': out_vars},
+            inputs={
+                'X':
+                [parent_block.var_recursive(x_name) for x_name in x_name_list]
+            },
+            outputs={},
             attrs={'sub_block': go_block})

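
# Illustrative sketch (not part of the diff): the hunk above makes the Go
# operator treat a name as an external input only when no op inside the
# go_block produced it first. The toy op format below (plain dicts with
# "inputs"/"outputs" name lists) is hypothetical, standing in for Paddle ops.
def collect_go_inputs(toy_ops):
    inner_outputs = set()
    x_name_list = set()
    for op in toy_ops:
        for in_var_name in op["inputs"]:
            # Names produced earlier inside the block are not external inputs.
            if in_var_name not in inner_outputs:
                x_name_list.add(in_var_name)
        for out_var_name in op["outputs"]:
            inner_outputs.add(out_var_name)
    return x_name_list

# 'x' flows in from the parent block; 'tmp' is created inside the block,
# so only 'x' is wired up as an input of the go op.
assert collect_go_inputs([
    {"inputs": ["x"], "outputs": ["tmp"]},
    {"inputs": ["tmp"], "outputs": ["y"]},
]) == {"x"}
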
@@ -88,8 +96,8 @@ def make_channel(dtype, capacity=0):
     `channel_close`, and `Go` to design a concurrent Paddle program.

     Args:
-        dtype (ParamAttr|int): Data type of the data sent in the channel.
-            This data type should be one of the Paddle supported data types.
+        dtype (ParamAttr|string): Data type of the data sent in the channel.
+            This data type should be the string name of a numpy data type.
         capacity (ParamAttr|int): Size of the channel. Defaults to 0, which
             creates an unbuffered channel.

@@ -106,22 +114,24 @@ def make_channel(dtype, capacity=0):
         fluid.channel_send(ch, 100)
         fluid.channel_close(ch)
     """
-    helper = LayerHelper('make_channel', **locals())
+    helper = LayerHelper('channel_create', **locals())
     main_program = helper.main_program
     make_channel_block = main_program.current_block()

     # Make a channel variable (using the channel data type) and make sure it
     # persists into the global scope.
     channel = helper.create_variable(
-        dtype=core.VarDesc.VarType.CHANNEL, persistable=True)
+        name=unique_name.generate('channel'),
+        type=core.VarDesc.VarType.CHANNEL,
+        persistable=True)

     create_channel_op = make_channel_block.append_op(
         type="channel_create",
         outputs={"Out": channel},
         attrs={"data_type": dtype,
                "capacity": capacity})

-    return create_channel_op
+    return channel
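
# Hedged usage sketch (not part of the diff): make_channel now returns the
# channel Variable itself rather than the channel_create op, so the result
# feeds directly into the other channel layers, as in the docstring example
# above. The `paddle.fluid` import path is assumed.
import paddle.fluid as fluid

ch = fluid.make_channel(dtype='int32', capacity=10)
fluid.channel_send(ch, 100)
fluid.channel_close(ch)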


 def channel_send(channel, value):
@@ -133,7 +143,7 @@ def channel_send(channel, value):
     Args:
         channel (Variable|Channel): Channel variable created using
             `make_channel`.
-
+        value (Variable): Value to send to the channel.
     Returns:
         Variable: The boolean status on whether or not the channel
             successfully sent the passed value.
@@ -149,7 +159,11 @@ def channel_send(channel, value):
     helper = LayerHelper('channel_send', **locals())
     main_program = helper.main_program
     channel_send_block = main_program.current_block()
-    status = helper.create_variable(dtype=core.VarDesc.VarType.TENSOR)
+
+    status = helper.create_variable(
+        name=unique_name.generate('status'),
+        type=core.VarDesc.VarType.LOD_TENSOR,
+        dtype=core.VarDesc.VarType.BOOL)

     channel_send_op = channel_send_block.append_op(
         type="channel_send",
@@ -159,10 +173,10 @@ def channel_send(channel, value):
         },
         outputs={"Status": status})

-    return channel_send_op
+    return status

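
# Hedged sketch (not part of the diff): channel_send now returns the BOOL
# status Variable instead of the channel_send op, letting callers observe
# whether the send succeeded. The `paddle.fluid` import path is assumed.
import paddle.fluid as fluid

ch = fluid.make_channel(dtype='int32', capacity=10)
send_status = fluid.channel_send(ch, 100)  # BOOL LoDTensor Variable
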
-def channel_recv(channel, dtype):
+def channel_recv(channel, return_value):
     """
     Receives a value through a channel variable. Used by an unbuffered or
     buffered channel within a concurrent Go block to get data from originally
@@ -172,11 +186,10 @@ def channel_recv(channel, dtype):
     Args:
         channel (Variable|Channel): Channel variable created using
             `make_channel`.
-        dtype (Variable|int): Data type of the data expected to be read in the
-            channel. This data type should be one of the Paddle supported data
-            types.
+        return_value (Variable): Variable that will hold the value received
+            from the channel.

     Returns:
+        Variable: The received value from the channel.
         Variable: The boolean status on whether or not the channel
             successfully received the passed value.

@@ -185,24 +198,26 @@ def channel_recv(channel, dtype):

         ch = fluid.make_channel(dtype='int32', capacity=10)
         with fluid.Go():
-            fluid.channel_recv(ch, 'int32')
+            variable = fluid.layers.fill_constant(shape=[1], dtype='int32', value=0)
+            returned_value, status = fluid.channel_recv(ch, variable)

         # Code to send data through the channel.
     """
     helper = LayerHelper('channel_recv', **locals())
     main_program = helper.main_program
     channel_recv_block = main_program.current_block()

-    return_value = helper.create_variable(dtype=dtype)
-    status = helper.create_variable(dtype=core.VarDesc.VarType.TENSOR)
+    status = helper.create_variable(
+        name=unique_name.generate('status'),
+        type=core.VarDesc.VarType.LOD_TENSOR,
+        dtype=core.VarDesc.VarType.BOOL)

     channel_recv_op = channel_recv_block.append_op(
         type="channel_recv",
         inputs={"Channel": channel},
         outputs={"Out": return_value,
                  "Status": status})

-    return channel_recv_op
+    return return_value, status

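
# Hedged sketch (not part of the diff): channel_recv now fills a caller
# supplied Variable and returns a (value, status) pair, mirroring Go's
# `v, ok := <-ch` idiom. Using fill_constant to create the target Variable
# is an assumption, one plausible way to allocate it.
import paddle.fluid as fluid

ch = fluid.make_channel(dtype='int32', capacity=10)
result = fluid.layers.fill_constant(shape=[1], dtype='int32', value=0)
received, recv_status = fluid.channel_recv(ch, result)
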

 def channel_close(channel):
@@ -228,5 +243,3 @@ def channel_close(channel):

     channel_close_op = channel_close_block.append_op(
         type="channel_close", inputs={"Channel": channel})
-
-    return channel_close_op
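
# End-to-end hedged sketch (not part of the diff), pieced together from the
# docstring examples above: a Go block sends through the channel while the
# main block receives and then closes it. fill_constant as the receive
# target is an assumption, as before.
import paddle.fluid as fluid

ch = fluid.make_channel(dtype='int32', capacity=10)
with fluid.Go():
    fluid.channel_send(ch, 100)

result = fluid.layers.fill_constant(shape=[1], dtype='int32', value=0)
received, status = fluid.channel_recv(ch, result)
fluid.channel_close(ch)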