diff --git a/src/polyfill/fetch.js b/src/polyfill/fetch.js
new file mode 100644
index 0000000000000..979335699a84e
--- /dev/null
+++ b/src/polyfill/fetch.js
@@ -0,0 +1,80 @@
+// Fetch polyfill from https://github.com/developit/unfetch
+// License:
+//==============================================================================
+// Copyright (c) 2017 Jason Miller
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+//==============================================================================
+
+#if !POLYFILL
+#error "this file should never be included unless POLYFILL is set"
+#endif
+
+if (typeof globalThis.fetch == 'undefined') {
+  globalThis.fetch = function (url, options) {
+    options = options || {};
+    return new Promise((resolve, reject) => {
+      const request = new XMLHttpRequest();
+      const keys = [];
+      const headers = {};
+
+      request.responseType = 'arraybuffer';
+
+      const response = () => ({
+        ok: ((request.status / 100) | 0) == 2, // 200-299
+        statusText: request.statusText,
+        status: request.status,
+        url: request.responseURL,
+        text: () => Promise.resolve(request.responseText),
+        json: () => Promise.resolve(request.responseText).then(JSON.parse),
+        blob: () => Promise.resolve(new Blob([request.response])),
+        arrayBuffer: () => Promise.resolve(request.response),
+        clone: response,
+        headers: {
+          keys: () => keys,
+          entries: () => keys.map((n) => [n, request.getResponseHeader(n)]),
+          get: (n) => request.getResponseHeader(n),
+          has: (n) => request.getResponseHeader(n) != null,
+        },
+      });
+
+      request.open(options.method || "get", url, true);
+
+      request.onload = () => {
+        request
+          .getAllResponseHeaders()
+          .toLowerCase()
+          .replace(/^(.+?):/gm, (m, key) => {
+            headers[key] || keys.push((headers[key] = key));
+          });
+        resolve(response());
+      };
+
+      request.onerror = reject;
+
+      request.withCredentials = options.credentials == "include";
+
+      for (const i in options.headers) {
+        request.setRequestHeader(i, options.headers[i]);
+      }
+
+      request.send(options.body || null);
+    });
+  }
+}
diff --git a/src/shell.js b/src/shell.js
index add93992b4960..e0e99753c067c 100644
--- a/src/shell.js
+++ b/src/shell.js
@@ -61,6 +61,11 @@ var Module = typeof {{{ EXPORT_NAME }}} != 'undefined' ? {{{ EXPORT_NAME }}} : {
 // See https://caniuse.com/mdn-javascript_builtins_bigint64array
 #include "polyfill/bigint64array.js"
 #endif
+
+#if MIN_CHROME_VERSION < 40 || MIN_FIREFOX_VERSION < 39 || MIN_SAFARI_VERSION < 103000
+// See https://caniuse.com/fetch
+#include "polyfill/fetch.js"
+#endif
 #endif // POLYFILL
 
 #if MODULARIZE
diff --git a/src/web_or_worker_shell_read.js b/src/web_or_worker_shell_read.js
index 3ae5842e8f298..7bdf9868761cd 100644
--- a/src/web_or_worker_shell_read.js
+++ b/src/web_or_worker_shell_read.js
@@ -22,17 +22,12 @@
   }
 
   readAsync = (url, onload, onerror) => {
-    var xhr = new XMLHttpRequest();
-    xhr.open('GET', url, true);
-    xhr.responseType = 'arraybuffer';
-    xhr.onload = () => {
-      if (xhr.status == 200 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
-        onload(xhr.response);
-        return;
+    fetch(url)
+      .then(response => {
+        if(response.ok) {
+          return response.arrayBuffer();
       }
-      onerror();
-    };
-    xhr.onerror = onerror;
-    xhr.send(null);
-  }
-
+        return Promise.reject(new Error(response.statusText + ' : ' + response.url));
+      })
+      .then(onload, onerror)
+  };
diff --git a/test/common.py b/test/common.py
index c19be77f01e00..a342524e42d30 100644
--- a/test/common.py
+++ b/test/common.py
@@ -2270,7 +2270,7 @@ def compile_btest(self, filename, args, reporting=Reporting.FULL):
       if reporting == Reporting.FULL:
         # If C reporting (i.e. the REPORT_RESULT macro) is required we
         # also include report_result.c and force-include report_result.h
-        self.run_process([EMCC, '-c', '-I' + TEST_ROOT,
+        self.run_process([EMCC, '-c', '-fPIC', '-I' + TEST_ROOT,
                           '-DEMTEST_PORT_NUMBER=%d' % self.port,
                           test_file('report_result.c')] + self.get_emcc_args(compile_only=True))
         args += ['report_result.o', '-include', test_file('report_result.h')]
diff --git a/test/other/test_unoptimized_code_size_no_asserts.js.size b/test/other/test_unoptimized_code_size_no_asserts.js.size
index 587cc797c2641..6a0a9c8c4c3ff 100644
--- a/test/other/test_unoptimized_code_size_no_asserts.js.size
+++ b/test/other/test_unoptimized_code_size_no_asserts.js.size
@@ -1 +1 @@
-31420
+31418
diff --git a/test/test_browser.py b/test/test_browser.py
index 957883dbe17cb..2164e15e821c9 100644
--- a/test/test_browser.py
+++ b/test/test_browser.py
@@ -699,7 +699,8 @@ def setup(assetLocalization):


+          {{{ SCRIPT }}}
+
+    ''')
+
+    def test(args, expect_fail):
+      self.compile_btest('main.cpp', args + ['--preload-file', path, '--shell-file', 'on_window_error_shell.html', '-o', 'a.out.html'])
+      js = read_file('a.out.js')
+      if expect_fail:
+        create_file('a.out.js', 'fetch = undefined;\n' + js)
+        return self.run_browser('a.out.html', '/report_result?TypeError: fetch is not a function')
+      else:
+        return self.run_browser('a.out.html', '/report_result?1')
+
+    test([], expect_fail=True)
+    test(['-sLEGACY_VM_SUPPORT'], expect_fail=False)
+    test(['-sLEGACY_VM_SUPPORT', '-sNO_POLYFILL'], expect_fail=True)
+
+  def test_fetch_polyfill_shared_lib(self):
+    create_file('library.c', r'''
+      #include <stdio.h>
+      int library_func() {
+        return 42;
+      }
+    ''')
+    create_file('main.c', r'''
+      #include <assert.h>
+      #include <dlfcn.h>
+      #include <stdio.h>
+      #include <emscripten.h>
+      int main() {
+        int found = EM_ASM_INT(
+          return preloadedWasm['/library.so'] !== undefined;
+        );
+        void *lib_handle = dlopen("/library.so", RTLD_NOW);
+        typedef int (*voidfunc)();
+        voidfunc x = (voidfunc)dlsym(lib_handle, "library_func");
+        printf("Got val: %d\n", x());
+        assert(x() == 42);
+        return 0;
+      }
+    ''')
+    create_file('on_window_error_shell.html', r'''
+
+
+
+          {{{ SCRIPT }}}
+
+    ''')
+
+    self.run_process([EMCC, 'library.c', '-sSIDE_MODULE', '-O2', '-o', 'library.so'])
+
+    def test(args, expect_fail):
+      self.compile_btest('main.c', ['library.so', '-sMAIN_MODULE', '--shell-file', 'on_window_error_shell.html', '-o', 'a.out.html'])
+      js = read_file('a.out.js')
+      if expect_fail:
+        create_file('a.out.js', 'fetch = undefined;\n' + js)
+        return self.run_browser('a.out.html', '/report_result?abort:TypeError')
+      else:
+        return self.run_browser('a.out.html', '/report_result?1')
+
+    test([], expect_fail=True)
+    test(['-sLEGACY_VM_SUPPORT'], expect_fail=False)
+    test(['-sLEGACY_VM_SUPPORT', '-sNO_POLYFILL'], expect_fail=True)
+
 
 class emrun(RunnerCore):
   def test_emrun_info(self):
diff --git a/tools/file_packager.py b/tools/file_packager.py
index d435a33209cab..e247f5666b58e 100755
--- a/tools/file_packager.py
+++ b/tools/file_packager.py
@@ -359,7 +359,7 @@ def generate_object_file(data_files):
 
 def main():
   if len(sys.argv) == 1:
-    err('''Usage: file_packager TARGET [--preload A [B..]] [--embed C [D..]] [--exclude E [F..]]] [--js-output=OUTPUT.js] [--no-force] [--use-preload-cache] [--indexedDB-name=EM_PRELOAD_CACHE] [--separate-metadata] [--lz4] [--use-preload-plugins]
+    err('''Usage: file_packager TARGET [--preload A [B..]] [--embed C [D..]] [--exclude E [F..]]] [--js-output=OUTPUT.js] [--no-force] [--use-preload-cache] [--indexedDB-name=EM_PRELOAD_CACHE] [--separate-metadata] [--lz4] [--use-preload-plugins] [--no-node]
 See the source for more details.''')
     return 1
 
@@ -775,7 +775,7 @@ def generate_js(data_target, data_files, metadata):
       }
       var REMOTE_PACKAGE_NAME = Module['locateFile'] ? Module['locateFile'](REMOTE_PACKAGE_BASE, '') : REMOTE_PACKAGE_BASE;\n''' % (js_manipulation.escape_for_js_string(data_target), js_manipulation.escape_for_js_string(remote_package_name))
     metadata['remote_package_size'] = remote_package_size
-    ret += '''var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];\n'''
+    ret += '''      var REMOTE_PACKAGE_SIZE = metadata['remote_package_size'];\n'''
 
     if options.use_preload_cache:
       # Set the id to a hash of the preloaded data, so that caches survive over multiple builds
@@ -965,59 +965,74 @@ def generate_js(data_target, data_files, metadata):
           });
           return;
         }'''.strip()
+
    ret += '''
      function fetchRemotePackage(packageName, packageSize, callback, errback) {
        %(node_support_code)s
-        var xhr = new XMLHttpRequest();
-        xhr.open('GET', packageName, true);
-        xhr.responseType = 'arraybuffer';
-        xhr.onprogress = function(event) {
-          var url = packageName;
-          var size = packageSize;
-          if (event.total) size = event.total;
-          if (event.loaded) {
-            if (!xhr.addedTotal) {
-              xhr.addedTotal = true;
-              if (!Module.dataFileDownloads) Module.dataFileDownloads = {};
-              Module.dataFileDownloads[url] = {
-                loaded: event.loaded,
-                total: size
-              };
-            } else {
-              Module.dataFileDownloads[url].loaded = event.loaded;
-            }
-            var total = 0;
-            var loaded = 0;
-            var num = 0;
-            for (var download in Module.dataFileDownloads) {
-              var data = Module.dataFileDownloads[download];
-              total += data.total;
-              loaded += data.loaded;
-              num++;
-            }
-            total = Math.ceil(total * Module.expectedDataFileDownloads/num);
-            if (Module['setStatus']) Module['setStatus'](`Downloading data... (${loaded}/${total})`);
-          } else if (!Module.dataFileDownloads) {
-            if (Module['setStatus']) Module['setStatus']('Downloading data...');
+        Module.dataFileDownloads = Module.dataFileDownloads || {};
+        const url = packageName;
+        fetch(url)
+          .catch(cause => Promise.reject(new Error('Network Error : ' + url, {cause}))) // If fetch fails, rewrite the error to include the failing URL & the cause.
+          .then(response => {
+
+            let loaded = 0;
+
+            if (!response.ok) {
+              return Promise.reject(new Error(response.statusText + ' : ' + response.url));
          }
-        };
-        xhr.onerror = function(event) {
-          throw new Error("NetworkError for: " + packageName);
-        }
-        xhr.onload = function(event) {
-          if (xhr.status == 200 || xhr.status == 304 || xhr.status == 206 || (xhr.status == 0 && xhr.response)) { // file URLs can return 0
-            var packageData = xhr.response;
-            callback(packageData);
-          } else {
-            throw new Error(xhr.statusText + " : " + xhr.responseURL);
+
+            // If we're using the polyfill, readers won't be available...
+            if(!response.body && response.arrayBuffer) {
+              response.arrayBuffer().then(buffer => callback(buffer));
+              return;
          }
-        };
-        xhr.send(null);
-      };
+            const reader = response.body.getReader();
+            const headers = response.headers;
+
+            const total = headers.get('Content-Length') ?? packageSize;
+            const chunks = [];
+
+            const iterate = () => reader.read().then(handleChunk).catch(cause => {
+              return Promise.reject(new Error(response.statusText + ' : ' + response.url, {cause}));
+            });
+
+            const handleChunk = ({done, value}) => {
+              if (!done) {
+                chunks.push(value);
+                loaded += value.length;
+                Module.dataFileDownloads[url] = Module.dataFileDownloads[url] ?? {};
+                Module.dataFileDownloads[url].loaded = loaded;
+                Module.dataFileDownloads[url].total = total;
+
+                if (total) {
+                  if (Module['setStatus']) Module['setStatus'](`Downloading data... (${loaded}/${total})`);
+                }
+                else {
+                  if (Module['setStatus']) Module['setStatus']('Downloading data...');
+                }
+                return iterate();
+              }
+              else {
+                const size = chunks.map(c => c.length).reduce((a, b) => a + b, 0);
+                let index = 0;
+                const packageData = new Uint8Array(size);
+                for(const chunk of chunks) {
+                  packageData.set(chunk, index);
+                  index += chunk.length;
+                }
+
+                callback(packageData.buffer);
+              }
+            };
+            return iterate();
+          });
+      };\n''' % {'node_support_code': node_support_code}
+
+    ret += '''
      function handleError(error) {
        console.error('package error:', error);
-      };\n''' % {'node_support_code': node_support_code}
+      };\n'''
 
    code += '''
      function processPackageData(arrayBuffer) {
@@ -1113,15 +1128,14 @@ def generate_js(data_target, data_files, metadata):
     function runMetaWithFS() {
       Module['addRunDependency']('%(metadata_file)s');
       var REMOTE_METADATA_NAME = Module['locateFile'] ? Module['locateFile']('%(metadata_file)s', '') : '%(metadata_file)s';
-      var xhr = new XMLHttpRequest();
-      xhr.onreadystatechange = function() {
-        if (xhr.readyState === 4 && xhr.status === 200) {
-          loadPackage(JSON.parse(xhr.responseText));
-        }
-      }
-      xhr.open('GET', REMOTE_METADATA_NAME, true);
-      xhr.overrideMimeType('application/json');
-      xhr.send(null);
+      fetch(REMOTE_METADATA_NAME)
+        .then(response => {
+          if(response.ok) {
+            return response.json();
+          }
+          return Promise.reject(new Error(response.statusText + ' : ' + response.url));
+        })
+        .then(loadPackage);
     }
 
     if (Module['calledRun']) {
@@ -1130,7 +1144,6 @@ def generate_js(data_target, data_files, metadata):
       if (!Module['preRun']) Module['preRun'] = [];
       Module["preRun"].push(runMetaWithFS);
     }\n''' % {'metadata_file': os.path.basename(options.jsoutput + '.metadata')}
-
  else:
    _metadata_template = '''
    }