@@ -123,6 +123,7 @@ static JS::PersistentRootedObjectVector *pending_body_reads;

// TODO(performance): introduce a version that writes into an existing buffer, and use that
// with the hostcall buffer where possible.
+// https://github.com/fastly/js-compute-runtime/issues/215
UniqueChars encode(JSContext *cx, HandleString str, size_t *encoded_len) {
  UniqueChars text = JS_EncodeStringToUTF8(cx, str);
  if (!text)
@@ -197,8 +198,10 @@ static char *read_from_handle_all(JSContext *cx, HandleType handle, size_t *nwri
                                  bool read_until_zero) {
  // TODO(performance): investigate passing a size hint in situations where we might know
  // the final size, e.g. via the `content-length` header.
+  // https://github.com/fastly/js-compute-runtime/issues/216
  size_t buf_size = HANDLE_READ_CHUNK_SIZE;
  // TODO(performance): make use of malloc slack.
+  // https://github.com/fastly/js-compute-runtime/issues/217
  char *buf = static_cast<char *>(JS_malloc(cx, buf_size));
  if (!buf) {
    JS_ReportOutOfMemory(cx);
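
The size-hint idea in the TODO above (issue 216) is only linked, not implemented, in this change. A minimal sketch of how it could shape the initial allocation, under that assumption: `alloc_read_buffer` and its `size_hint` parameter are hypothetical names, while `JS_malloc`, `JS_ReportOutOfMemory`, and `HANDLE_READ_CHUNK_SIZE` are the ones the surrounding function already uses.

// Hypothetical sketch: pre-size the read buffer when a hint is available
// (e.g. a parsed `content-length`), otherwise keep today's fixed chunk size.
static char *alloc_read_buffer(JSContext *cx, size_t size_hint, size_t *buf_size) {
  *buf_size = size_hint > 0 ? size_hint : HANDLE_READ_CHUNK_SIZE;
  char *buf = static_cast<char *>(JS_malloc(cx, *buf_size));
  if (!buf)
    JS_ReportOutOfMemory(cx);
  return buf;
}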
@@ -223,6 +226,7 @@ static char *read_from_handle_all(JSContext *cx, HandleType handle, size_t *nwri
    }

    // TODO(performance): make use of malloc slack, and use a smarter buffer growth strategy.
+    // https://github.com/fastly/js-compute-runtime/issues/217
    size_t new_size = buf_size + HANDLE_READ_CHUNK_SIZE;
    new_buf = static_cast<char *>(JS_realloc(cx, buf, buf_size, new_size));
    if (!new_buf) {
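
The "smarter buffer growth strategy" deferred to issue 217 is not spelled out here; one common option is geometric growth, sketched below under that assumption. `grow_read_buffer` is a hypothetical helper; the `JS_realloc`/`JS_free` calls and the explicit out-of-memory report mirror the allocation handling already visible in this function.

// Hypothetical sketch: double the buffer instead of adding one fixed chunk,
// so reading n bytes costs O(log n) reallocations rather than O(n / chunk).
static char *grow_read_buffer(JSContext *cx, char *buf, size_t *buf_size) {
  size_t new_size = *buf_size * 2;
  char *new_buf = static_cast<char *>(JS_realloc(cx, buf, *buf_size, new_size));
  if (!new_buf) {
    JS_free(cx, buf);
    JS_ReportOutOfMemory(cx);
    return nullptr;
  }
  *buf_size = new_size;
  return new_buf;
}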
@@ -740,6 +744,7 @@ bool bodyAll(JSContext *cx, CallArgs args, HandleObject self) {
  // we need to manually read all chunks from the stream.
  // TODO(performance): ensure that we're properly shortcutting reads from TransformStream
  // readables.
+  // https://github.com/fastly/js-compute-runtime/issues/218
  RootedObject stream(cx, body_stream(self));
  if (stream && !builtins::NativeStreamSource::stream_is_body(cx, stream)) {
    if (!consume_content_stream_for_bodyAll(cx, self, stream, body_parser)) {
@@ -2148,8 +2153,10 @@ bool constructor(JSContext *cx, unsigned argc, Value *vp) {
  // e.g. to represent cache entries. While that's perhaps not ideal to begin
  // with, it exists, so we should handle it in a good way, and not be
  // superfluously slow.
+  // https://github.com/fastly/js-compute-runtime/issues/219
  // TODO(performance): enable creating Response objects during the init phase, and only
  // creating the host-side representation when processing requests.
+  // https://github.com/fastly/js-compute-runtime/issues/220
  ResponseHandle response_handle = {.handle = INVALID_HANDLE};
  if (!HANDLE_RESULT(cx, xqd_resp_new(&response_handle))) {
    return false;
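
The deferral described in issue 220 (create Response objects during init, materialize the host-side representation only when processing requests) is likewise only linked here. A minimal sketch of one shape it could take: `ensure_response_handle` is a hypothetical helper, not part of this diff; it reuses the `ResponseHandle`, `INVALID_HANDLE`, `HANDLE_RESULT`, and `xqd_resp_new` names from the hunk above, and assumes the constructor would store an invalid handle instead of calling the host.

// Hypothetical sketch: create the hostcall handle lazily, on first use,
// instead of in the constructor, so init-phase Response objects stay cheap.
static bool ensure_response_handle(JSContext *cx, ResponseHandle *handle) {
  if (handle->handle != INVALID_HANDLE)
    return true;  // host-side representation already exists
  // Only reach out to the host when something actually needs the handle,
  // e.g. when the response is returned to the client.
  return HANDLE_RESULT(cx, xqd_resp_new(handle));
}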
@@ -2926,6 +2933,7 @@ bool append(JSContext *cx, unsigned argc, Value *vp) {
 *
 * Assumes that both the name and value are valid and normalized.
 * TODO(performance): fully skip normalization.
+ * https://github.com/fastly/js-compute-runtime/issues/221
 */
bool maybe_add(JSContext *cx, HandleObject self, const char *name, const char *value) {
  MOZ_ASSERT(is_instance(self));