@@ -3,6 +3,7 @@ use std::iter;
 use std::result::Result;
 use std::time::{Duration, Instant};
 
+use graph::components::store::UnitStream;
 use graph::{components::store::SubscriptionManager, prelude::*};
 
 use crate::runner::ResultSizeMetrics;
@@ -40,7 +41,7 @@ pub struct SubscriptionExecutionOptions {
     pub result_size: Arc<ResultSizeMetrics>,
 }
 
-pub async fn execute_subscription(
+pub fn execute_subscription(
     subscription: Subscription,
     schema: Arc<ApiSchema>,
     options: SubscriptionExecutionOptions,
@@ -53,10 +54,10 @@ pub async fn execute_subscription(
         options.max_complexity,
         options.max_depth,
     )?;
-    execute_prepared_subscription(query, options).await
+    execute_prepared_subscription(query, options)
 }
 
-pub(crate) async fn execute_prepared_subscription(
+pub(crate) fn execute_prepared_subscription(
     query: Arc<crate::execution::Query>,
     options: SubscriptionExecutionOptions,
 ) -> Result<SubscriptionResult, SubscriptionError> {
@@ -72,15 +73,15 @@ pub(crate) async fn execute_prepared_subscription(
         "query" => &query.query_text,
     );
 
-    let source_stream = create_source_event_stream(query.clone(), &options).await?;
+    let source_stream = create_source_event_stream(query.clone(), &options)?;
     let response_stream = map_source_to_response_stream(query, options, source_stream);
     Ok(response_stream)
 }
 
-async fn create_source_event_stream(
+fn create_source_event_stream(
     query: Arc<crate::execution::Query>,
     options: &SubscriptionExecutionOptions,
-) -> Result<StoreEventStreamBox, SubscriptionError> {
+) -> Result<UnitStream, SubscriptionError> {
     let resolver = StoreResolver::for_subscription(
         &options.logger,
         query.schema.id().clone(),
@@ -123,35 +124,31 @@ async fn create_source_event_stream(
     let field = fields.1[0];
     let argument_values = coerce_argument_values(&ctx.query, subscription_type.as_ref(), field)?;
 
-    resolve_field_stream(&ctx, &subscription_type, field, argument_values).await
+    resolve_field_stream(&ctx, &subscription_type, field, argument_values)
 }
 
-async fn resolve_field_stream(
+fn resolve_field_stream(
     ctx: &ExecutionContext<impl Resolver>,
     object_type: &s::ObjectType,
     field: &q::Field,
     _argument_values: HashMap<&str, r::Value>,
-) -> Result<StoreEventStreamBox, SubscriptionError> {
+) -> Result<UnitStream, SubscriptionError> {
     ctx.resolver
         .resolve_field_stream(&ctx.query.schema.document(), object_type, field)
-        .await
         .map_err(SubscriptionError::from)
 }
 
 fn map_source_to_response_stream(
     query: Arc<crate::execution::Query>,
     options: SubscriptionExecutionOptions,
-    source_stream: StoreEventStreamBox,
+    source_stream: UnitStream,
 ) -> QueryResultStream {
     // Create a stream with a single empty event. By chaining this in front
     // of the real events, we trick the subscription into executing its query
     // at least once. This satisfies the GraphQL over Websocket protocol
     // requirement of "respond[ing] with at least one GQL_DATA message", see
     // https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md#gql_data
-    let trigger_stream = futures03::stream::iter(vec![Ok(Arc::new(StoreEvent {
-        tag: 0,
-        changes: Default::default(),
-    }))]);
+    let trigger_stream = futures03::stream::once(async {});
 
     let SubscriptionExecutionOptions {
         logger,
@@ -165,43 +162,34 @@ fn map_source_to_response_stream(
         result_size,
     } = options;
 
-    Box::new(
-        trigger_stream
-            .chain(source_stream.compat())
-            .then(move |res| match res {
-                Err(()) => {
-                    futures03::future::ready(Arc::new(QueryExecutionError::EventStreamError.into()))
-                        .boxed()
-                }
-                Ok(event) => execute_subscription_event(
-                    logger.clone(),
-                    store.clone(),
-                    subscription_manager.cheap_clone(),
-                    query.clone(),
-                    event,
-                    timeout,
-                    max_first,
-                    max_skip,
-                    result_size.cheap_clone(),
-                )
-                .boxed(),
-            }),
-    )
+    trigger_stream
+        .chain(source_stream)
+        .then(move |()| {
+            execute_subscription_event(
+                logger.clone(),
+                store.clone(),
+                subscription_manager.cheap_clone(),
+                query.clone(),
+                timeout,
+                max_first,
+                max_skip,
+                result_size.cheap_clone(),
+            )
+            .boxed()
+        })
+        .boxed()
 }
 
 async fn execute_subscription_event(
     logger: Logger,
     store: Arc<dyn QueryStore>,
     subscription_manager: Arc<dyn SubscriptionManager>,
     query: Arc<crate::execution::Query>,
-    event: Arc<StoreEvent>,
     timeout: Option<Duration>,
     max_first: u32,
     max_skip: u32,
     result_size: Arc<ResultSizeMetrics>,
 ) -> Arc<QueryResult> {
-    debug!(logger, "Execute subscription event"; "event" => format!("{:?}", event));
-
     let resolver = match StoreResolver::at_block(
         &logger,
         store,
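
Illustrative note (not part of the diff): the new map_source_to_response_stream body leans on the futures 0.3 combinators once, chain, then and boxed over a stream of unit items. Below is a minimal, self-contained sketch of that pattern; the run_query helper and the tokio runtime are assumptions made only for this example and are not part of graph-node.

use futures::stream::{self, StreamExt};

// Stand-in for execute_subscription_event: re-run the query for every event.
async fn run_query(n: usize) -> String {
    format!("result #{}", n)
}

#[tokio::main]
async fn main() {
    // Source events carry no payload; each `()` only means "re-execute the query".
    let source_stream = stream::iter(vec![(), ()]);
    // A single empty trigger chained in front guarantees at least one result,
    // mirroring the GQL_DATA requirement the comment in the diff refers to.
    let trigger_stream = stream::once(async {});

    let mut results = trigger_stream
        .chain(source_stream)
        .enumerate()
        .then(|(i, ())| run_query(i))
        .boxed();

    while let Some(r) = results.next().await {
        println!("{}", r);
    }
}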