diff --git a/.codesandbox/.env.example b/.codesandbox/.env.example new file mode 100644 index 00000000..dc7ae985 --- /dev/null +++ b/.codesandbox/.env.example @@ -0,0 +1,13 @@ +# MongoDB Connection +MONGO_URL=mongodb://localhost/auth + +# Server Configuration +PORT=8080 +NODE_ENV=development + +# TMDB API +VITE_TMDB_API_KEY=your_tmdb_api_key_here +TMDB_BEARER_TOKEN=your_tmdb_bearer_token_here + +# Session Security +SESSION_SECRET=generate_a_strong_random_32_character_secret_here \ No newline at end of file diff --git a/.codesandbox/App.js b/.codesandbox/App.js new file mode 100644 index 00000000..dffff049 --- /dev/null +++ b/.codesandbox/App.js @@ -0,0 +1,73 @@ +var express = require('express'); +var passport = require('passport'); +var FacebookStrategy = require('passport-facebook'); +var session = require('express-session'); +var passport = require('passport'); +var db = require('../db'); +var router = express.Router(); +passport.use(new FacebookStrategy({ + clientID: process.env['FACEBOOK_CLIENT_ID'], + clientSecret: process.env['FACEBOOK_CLIENT_SECRET'], + callbackURL: '/oauth2/redirect/facebook', + state: true +}, function verify(accessToken, refreshToken, profile, cb) { + db.get('SELECT * FROM federated_credentials WHERE provider = ? 
AND subject = ?', [ + 'https://www.facebook.com', + profile.id + ], function(err, row) { + if (err) { return cb(err); } + if (!row) { + db.run('INSERT INTO users (name) VALUES (?)', [ + profile.displayName + ], function(err) { + if (err) { return cb(err); } + + var id = this.lastID; + db.run('INSERT INTO federated_credentials (user_id, provider, subject) VALUES (?, ?, ?)', [ + id, + 'https://www.facebook.com', + profile.id + ], function(err) { + if (err) { return cb(err); } + var user = { + id: id, + name: profile.displayName + }; + return cb(null, user); + }); + }); + } else { + db.get('SELECT * FROM users WHERE id = ?', [ row.user_id ], function(err, row) { + if (err) { return cb(err); } + if (!row) { return cb(null, false); } + return cb(null, row); + }); + } + }); + passport.serializeUser(function(user, cb) { + process.nextTick(function() { + cb(null, { id: user.id, username: user.username, name: user.name }); + }); + }); + + passport.deserializeUser(function(user, cb) { + process.nextTick(function() { + return cb(null, user); + }); + }); +})); +router.get('/login', function(req, res, next) { + res.render('login'); +}); +router.get('/login/federated/facebook', passport.authenticate('facebook')); +router.get('/oauth2/redirect/facebook', passport.authenticate('facebook', { + successRedirect: '/', + failureRedirect: '/login' +})); +router.post('/logout', function(req, res, next) { + req.logout(function(err) { + if (err) { return next(err); } + res.redirect('/'); + }); +}); +module.exports = router; \ No newline at end of file diff --git a/.codesandbox/tasks.json b/.codesandbox/tasks.json new file mode 100644 index 00000000..8452ec4c --- /dev/null +++ b/.codesandbox/tasks.json @@ -0,0 +1,23 @@ +{ + // These tasks will run in order when initializing your CodeSandbox project. + "setupTasks": [ + { + "command": "pnpm install", + "name": "Installing Dependencies" + } + ], + + // These tasks can be run from CodeSandbox. Running one will open a log in the app. 
+ "tasks": { + "dev": { + "name": "dev", + "command": "pnpm dev", + "runAtStart": true + }, + "build": { + "name": "build", + "command": "pnpm build", + + + + diff --git a/.codesandbox/touch routes/auth.js b/.codesandbox/touch routes/auth.js new file mode 100644 index 00000000..970fdffe --- /dev/null +++ b/.codesandbox/touch routes/auth.js @@ -0,0 +1,9 @@ +var express = require("express"); + +var router = express.Router(); + +router.get("/login", function (req, res, next) { + res.render("login"); +}); + +module.exports = router; diff --git a/.github/workflows/crda.yml b/.github/workflows/crda.yml new file mode 100644 index 00000000..c96e9b39 --- /dev/null +++ b/.github/workflows/crda.yml @@ -0,0 +1,126 @@ +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +# This workflow performs a static analysis of your source code using +# Red Hat CodeReady Dependency Analytics. + +# Scans are triggered: +# 1. On every push to default and protected branches +# 2. On every Pull Request targeting the default branch +# 3. On a weekly schedule +# 4. Manually, on demand, via the "workflow_dispatch" event + +# 💁 The CRDA Starter workflow will: +# - Checkout your repository +# - Setup the required tool stack +# - Install the CRDA command line tool +# - Auto detect the manifest file and install the project's dependencies +# - Perform the security scan using CRDA +# - Upload the SARIF result to the GitHub Code Scanning which can be viewed under the security tab +# - Optionally upload the SARIF file as an artifact for the future reference + +# â„šī¸ Configure your repository and the workflow with the following steps: +# 1. Setup the tool stack based on the project's requirement. +# Refer to: https://github.com/redhat-actions/crda/#1-set-up-the-tool-stack +# 2. 
(Optional) CRDA action attempt to detect the language and install the +# required dependencies for your project. If your project doesn't aligns +# with the default dependency installation command mentioned here +# https://github.com/redhat-actions/crda/#3-installing-dependencies. +# Use the required inputs to setup the same +# 3. (Optional) CRDA action attempts to detect the manifest file if it is +# present in the root of the project and named as per the default mentioned +# here https://github.com/redhat-actions/crda/#3-installing-dependencies. +# If it deviates from the default, use the required inputs to setup the same +# 4. Setup Authentication - Create the CRDA_KEY or SNYK_TOKEN. +# Refer to: https://github.com/redhat-actions/crda/#4-set-up-authentication +# 5. (Optional) Upload SARIF file as an Artifact to download and view +# 6. Commit and push the workflow file to your default branch to trigger a workflow run. + +# 👋 Visit our GitHub organization at https://github.com/redhat-actions/ to see our actions and provide feedback. + +name: CRDA Scan + +# Controls when the workflow will run +on: + # TODO: Customize trigger events based on your DevSecOps processes + # + # This workflow is made to run with OpenShift starter workflow + # https://github.com/actions/starter-workflows/blob/main/deployments/openshift.yml + # However, if you want to run this workflow as a standalone workflow, please + # uncomment the 'push' trigger below and configure it based on your requirements. + # + workflow_call: + secrets: + CRDA_KEY: + required: false + SNYK_TOKEN: + required: false + workflow_dispatch: + + # push: + # branches: [ "main" ] + + # pull_request_target is used to securely share secret to the PR's workflow run. 
+ # For more info visit: https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target + pull_request_target: + branches: [ "main" ] + types: [ assigned, opened, synchronize, reopened, labeled, edited ] + +permissions: + contents: read + +jobs: + crda-scan: + permissions: + contents: read # for actions/checkout to fetch code + security-events: write # for redhat-actions/crda to upload SARIF results + name: Scan project vulnerabilities with CRDA + runs-on: ubuntu-latest + steps: + + - name: Check out repository + uses: actions/checkout@v4 + + # ******************************************************************* + # Required: Instructions to setup project + # 1. Setup Go, Java, Node.js or Python depending on your project type + # 2. Setup Actions are listed below, choose one from them: + # - Go: https://github.com/actions/setup-go + # - Java: https://github.com/actions/setup-java + # - Node.js: https://github.com/actions/setup-node + # - Python: https://github.com/actions/setup-python + # + # Example: + # - name: Setup Node + # uses: actions/setup-node@v4 + # with: + # node-version: '20' + + # https://github.com/redhat-actions/openshift-tools-installer/blob/main/README.md + - name: Install CRDA CLI + uses: redhat-actions/openshift-tools-installer@v1 + with: + source: github + github_pat: ${{ github.token }} + # Choose the desired version of the CRDA CLI + crda: "latest" + + ###################################################################################### + # https://github.com/redhat-actions/crda/blob/main/README.md + # + # By default, CRDA will detect the manifest file and install the required dependencies + # using the standard command for the project type. 
+ # If your project doesn't aligns with the defaults mentioned in this action, you will + # need to set few inputs that are described here: + # https://github.com/redhat-actions/crda/blob/main/README.md#3-installing-dependencies + # Visit https://github.com/redhat-actions/crda/#4-set-up-authentication to understand + # process to get a SNYK_TOKEN or a CRDA_KEY + - name: CRDA Scan + id: scan + uses: redhat-actions/crda@v1 + with: + crda_key: ${{ secrets.CRDA_KEY }} # Either use crda_key or snyk_token + # snyk_token: ${{ secrets.SNYK_TOKEN }} + # upload_artifact: false # Set this to false to skip artifact upload diff --git a/.gitignore b/.gitignore index b02a1ff7..3b31ca2d 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,12 @@ package-lock.json *.njsproj *.sln *.sw? +.env + +# Local Netlify folder +.netlify +.env + +# Database +*.db +sessions.db \ No newline at end of file diff --git a/.idx/dev.nix b/.idx/dev.nix new file mode 100644 index 00000000..ab83c388 --- /dev/null +++ b/.idx/dev.nix @@ -0,0 +1,55 @@ +# To learn more about how to use Nix to configure your environment +# see: https://firebase.google.com/docs/studio/customize-workspace +{ pkgs, ... }: { + # Which nixpkgs channel to use. 
+ channel = "stable-24.05"; # or "unstable" + + # Use https://search.nixos.org/packages to find packages + packages = [ + # pkgs.go + # pkgs.python311 + # pkgs.python311Packages.pip + # pkgs.nodejs_20 + # pkgs.nodePackages.nodemon + ]; + + # Sets environment variables in the workspace + env = {}; + idx = { + # Search for the extensions you want on https://open-vsx.org/ and use "publisher.id" + extensions = [ + # "vscodevim.vim" + ]; + + # Enable previews + previews = { + enable = true; + previews = { + # web = { + # # Example: run "npm run dev" with PORT set to IDX's defined port for previews, + # # and show it in IDX's web preview panel + # command = ["npm" "run" "dev"]; + # manager = "web"; + # env = { + # # Environment variables to set for your server + # PORT = "$PORT"; + # }; + # }; + }; + }; + + # Workspace lifecycle hooks + workspace = { + # Runs when a workspace is first created + onCreate = { + # Example: install JS dependencies from NPM + # npm-install = "npm install"; + }; + # Runs when the workspace is (re)started + onStart = { + # Example: start a background task to watch and re-build backend code + # watch-backend = "npm run watch-backend"; + }; + }; + }; +} diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..034e8480 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,21 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ------- | ------------------ | +| 5.1.x | :white_check_mark: | +| 5.0.x | :x: | +| 4.0.x | :white_check_mark: | +| < 4.0 | :x: | + +## Reporting a Vulnerability + +Use this section to tell people how to report a vulnerability. + +Tell them where to go, how often they can expect to get an update on a +reported vulnerability, what to expect if the vulnerability is accepted or +declined, etc. 
diff --git a/_redirects.md b/_redirects.md new file mode 100644 index 00000000..7bcfb1f9 --- /dev/null +++ b/_redirects.md @@ -0,0 +1,9 @@ + +/home / +/blog/my-post.php /blog/my-post +/news /blog +/cuties https://www.petsofnetlify.com +/authors/c%C3%A9line /authors/about-c%C3%A9line +/blog/my-old-title /blog/my-new-title +/blog/my-old-title /blog/an-even-better-title +/* index.html 200 \ No newline at end of file diff --git a/app.js b/app.js new file mode 100644 index 00000000..c741115a --- /dev/null +++ b/app.js @@ -0,0 +1,12 @@ +require("dotenv").config(); +// ... other imports ... + +app.use( + session({ + secret: + process.env.SESSION_SECRET || crypto.randomBytes(32).toString("hex"), + resave: false, + saveUninitialized: false, + store: new SQLiteStore({ db: "sessions.db", dir: "./var/db" }), + }) +); diff --git a/README.md b/backend/README.md similarity index 100% rename from README.md rename to backend/README.md diff --git a/backend/db.js b/backend/db.js new file mode 100644 index 00000000..de6def9d --- /dev/null +++ b/backend/db.js @@ -0,0 +1,3537 @@ +import mongoose from 'mongoose'; +const { Schema } = mongoose; +const blogSchema = new Schema({ + title: String, // String is shorthand for {type: String} + author: String, + body: String, + comments: [{ body: String, date: Date }], + date: { type: Date, default: Date.now }, + hidden: Boolean, + meta: { + votes: Number, + favs: Number + } +}); +// getting-started.js +const mongoose = require('mongoose'); + +main().catch(err => console.log(err)); + +async function main() { + await mongoose.connect('mongodb://127.0.0.1:27017/test'); + + // use `await mongoose.connect('mongodb://user:password@127.0.0.1:27017/test');` if your database has auth enabled +} +const kittySchema = new mongoose.Schema({ + name: String + }); + const Kitten = mongoose.model('Kitten', kittySchema); + const silence = new Kitten({ name: 'Silence' }); +console.log(silence.name); // 'Silence' +// NOTE: methods must be added to the schema before 
compiling it with mongoose.model() +kittySchema.methods.speak = function speak() { + const greeting = this.name + ? 'Meow name is ' + this.name + : 'I don\'t have a name'; + console.log(greeting); + }; + + const Kitten = mongoose.model('Kitten', kittySchema); + const fluffy = new Kitten({ name: 'fluffy' }); +fluffy.speak(); // "Meow name is fluffy" +await fluffy.save(); +fluffy.speak(); +const kittens = await Kitten.find(); +console.log(kittens); +await Kitten.find({ name: /^fluff/ }); + +const Blog = mongoose.model('Blog', blogSchema); +// ready to go! +const schema = new Schema(); + +schema.path('_id'); // ObjectId { ... } +const Model = mongoose.model('Test', schema); + +const doc = new Model(); +doc._id instanceof mongoose.Types.ObjectId; // true +const schema = new Schema({ + _id: Number // <-- overwrite Mongoose's default `_id` + }); + const Model = mongoose.model('Test', schema); + + const doc = new Model(); + await doc.save(); // Throws "document must have an _id before saving" + + doc._id = 1; + await doc.save(); // works + const nestedSchema = new Schema( + { name: String }, + { _id: false } // <-- disable `_id` + ); + const schema = new Schema({ + subdoc: nestedSchema, + docArray: [nestedSchema] + }); + const Test = mongoose.model('Test', schema); + + // Neither `subdoc` nor `docArray.0` will have an `_id` + await Test.create({ + subdoc: { name: 'test 1' }, + docArray: [{ name: 'test 2' }] + }); + // define a schema +const animalSchema = new Schema({ name: String, type: String }, + { + // Assign a function to the "methods" object of our animalSchema through schema options. + // By following this approach, there is no need to create a separate TS type to define the type of the instance functions. 
+ methods: { + findSimilarTypes(cb) { + return mongoose.model('Animal').find({ type: this.type }, cb); + } + } + }); + const Animal = mongoose.model('Animal', animalSchema); +const dog = new Animal({ type: 'dog' }); + +dog.findSimilarTypes((err, dogs) => { + console.log(dogs); // woof +}); +// define a schema +const animalSchema = new Schema({ name: String, type: String }, + { + // Assign a function to the "statics" object of our animalSchema through schema options. + // By following this approach, there is no need to create a separate TS type to define the type of the statics functions. + statics: { + findByName(name) { + return this.find({ name: new RegExp(name, 'i') }); + } + } + }); + + // Or, Assign a function to the "statics" object of our animalSchema + animalSchema.statics.findByName = function(name) { + return this.find({ name: new RegExp(name, 'i') }); + }; + // Or, equivalently, you can call `animalSchema.static()`. + animalSchema.static('findByBreed', function(breed) { return this.find({ breed }); }); + + const Animal = mongoose.model('Animal', animalSchema); + let animals = await Animal.findByName('fido'); + animals = animals.concat(await Animal.findByBreed('Poodle')); + // define a schema +const animalSchema = new Schema({ name: String, type: String }, + { + // Assign a function to the "query" object of our animalSchema through schema options. + // By following this approach, there is no need to create a separate TS type to define the type of the query functions. 
+ query: { + byName(name) { + return this.where({ name: new RegExp(name, 'i') }); + } + } + }); + + // Or, Assign a function to the "query" object of our animalSchema + animalSchema.query.byName = function(name) { + return this.where({ name: new RegExp(name, 'i') }); + }; + + const Animal = mongoose.model('Animal', animalSchema); + + Animal.find().byName('fido').exec((err, animals) => { + console.log(animals); + }); + + Animal.findOne().byName('fido').exec((err, animal) => { + console.log(animal); + }); + Indexes + const animalSchema = new Schema({ + name: String, + type: String, + tags: { type: [String], index: true } // path level + }); + + animalSchema.index({ name: 1, type: -1 }); // schema level + mongoose.connect('mongodb://user:pass@127.0.0.1:port/database', { autoIndex: false }); +// or +mongoose.createConnection('mongodb://user:pass@127.0.0.1:port/database', { autoIndex: false }); +// or +mongoose.set('autoIndex', false); +// or +animalSchema.set('autoIndex', false); +// or +new Schema({ /* ... 
*/ }, { autoIndex: false }); +// Will cause an error because mongodb has an _id index by default that +// is not sparse +animalSchema.index({ _id: 1 }, { sparse: true }); +const Animal = mongoose.model('Animal', animalSchema); + +Animal.on('index', error => { + // "_id index cannot be sparse" + console.log(error.message); +}); +// define a schema +const personSchema = new Schema({ + name: { + first: String, + last: String + } + }); + + // compile our model + const Person = mongoose.model('Person', personSchema); + + // create a document + const axl = new Person({ + name: { first: 'Axl', last: 'Rose' } + }); + console.log(axl.name.first + ' ' + axl.name.last); // Axl Rose + // That can be done either by adding it to schema options: +const personSchema = new Schema({ + name: { + first: String, + last: String + } + }, { + virtuals: { + fullName: { + get() { + return this.name.first + ' ' + this.name.last; + } + } + } + }); + + // Or by using the virtual method as following: + personSchema.virtual('fullName').get(function() { + return this.name.first + ' ' + this.name.last; + }); + console.log(axl.fullName); // Axl Rose + // Convert `doc` to a POJO, with virtuals attached +doc.toObject({ virtuals: true }); + +// Equivalent: +doc.toJSON({ virtuals: true }); +// Explicitly add virtuals to `JSON.stringify()` output +JSON.stringify(doc.toObject({ virtuals: true })); + +// Or, to automatically attach virtuals to `JSON.stringify()` output: +const personSchema = new Schema({ + name: { + first: String, + last: String + } +}, { + toJSON: { virtuals: true } // <-- include virtuals in `JSON.stringify()` +}); +// Again that can be done either by adding it to schema options: +const personSchema = new Schema({ + name: { + first: String, + last: String + } + }, { + virtuals: { + fullName: { + get() { + return this.name.first + ' ' + this.name.last; + }, + set(v) { + this.name.first = v.substr(0, v.indexOf(' ')); + this.name.last = v.substr(v.indexOf(' ') + 1); + } + } + } + }); + + 
// Or by using the virtual method as following: + personSchema.virtual('fullName'). + get(function() { + return this.name.first + ' ' + this.name.last; + }). + set(function(v) { + this.name.first = v.substr(0, v.indexOf(' ')); + this.name.last = v.substr(v.indexOf(' ') + 1); + }); + + axl.fullName = 'William Rose'; // Now `axl.name.first` is "William" + const personSchema = new Schema({ + n: { + type: String, + // Now accessing `name` will get you the value of `n`, and setting `name` will set the value of `n` + alias: 'name' + } + }); + + // Setting `name` will propagate to `n` + const person = new Person({ name: 'Val' }); + console.log(person); // { n: 'Val' } + console.log(person.toObject({ virtuals: true })); // { n: 'Val', name: 'Val' } + console.log(person.name); // "Val" + + person.name = 'Not Val'; + console.log(person); // { n: 'Not Val' } + const childSchema = new Schema({ + n: { + type: String, + alias: 'name' + } + }, { _id: false }); + + const parentSchema = new Schema({ + // If in a child schema, alias doesn't need to include the full nested path + c: childSchema, + name: { + f: { + type: String, + // Alias needs to include the full nested path if declared inline + alias: 'name.first' + } + } + }); + new Schema({ /* ... */ }, options); + +// or + +const schema = new Schema({ /* ... */ }); +schema.set(option, value); +const schema = new Schema({ /* ... */ }, { autoIndex: false }); +const Clock = mongoose.model('Clock', schema); +Clock.ensureIndexes(callback); +const schema = new Schema({ name: String }, { + autoCreate: false, + capped: { size: 1024 } + }); + const Test = mongoose.model('Test', schema); + + // No-op if collection already exists, even if the collection is not capped. + // This means that `capped` won't be applied if the 'tests' collection already exists. + await Test.createCollection(); + const schema = new Schema({ /* ... 
*/ }, { bufferCommands: false }); + mongoose.set('bufferCommands', true); +// Schema option below overrides the above, if the schema option is set. +const schema = new Schema({ /* ... */ }, { bufferCommands: false }); +// If an operation is buffered for more than 1 second, throw an error. +const schema = new Schema({ /* ... */ }, { bufferTimeoutMS: 1000 }); +new Schema({ /* ... */ }, { capped: 1024 }); +new Schema({ /* ... */ }, { capped: { size: 1024, max: 1000, autoIndexId: true } }); +const dataSchema = new Schema({ /* ... */ }, { collection: 'data' }); +const baseSchema = new Schema({}, { discriminatorKey: 'type' }); +const BaseModel = mongoose.model('Test', baseSchema); + +const personSchema = new Schema({ name: String }); +const PersonModel = BaseModel.discriminator('Person', personSchema); + +const doc = new PersonModel({ name: 'James T. Kirk' }); +// Without `discriminatorKey`, Mongoose would store the discriminator +// key in `__t` instead of `type` +doc.type; // 'Person' +const childSchema1 = Schema({ + name: { type: String, index: true } + }); + + const childSchema2 = Schema({ + name: { type: String, index: true } + }, { excludeIndexes: true }); + + // Mongoose will create an index on `child1.name`, but **not** `child2.name`, because `excludeIndexes` + // is true on `childSchema2` + const User = new Schema({ + name: { type: String, index: true }, + child1: childSchema1, + child2: childSchema2 + }); + // default behavior +const schema = new Schema({ name: String }); +const Page = mongoose.model('Page', schema); +const p = new Page({ name: 'mongodb.org' }); +console.log(p.id); // '50341373e894ad16347efe01' + +// disabled id +const schema = new Schema({ name: String }, { id: false }); +const Page = mongoose.model('Page', schema); +const p = new Page({ name: 'mongodb.org' }); +console.log(p.id); // undefined +// default behavior +const schema = new Schema({ name: String }); +const Page = mongoose.model('Page', schema); +const p = new Page({ name: 
'mongodb.org' }); +console.log(p); // { _id: '50341373e894ad16347efe01', name: 'mongodb.org' } + +// disabled _id +const childSchema = new Schema({ name: String }, { _id: false }); +const parentSchema = new Schema({ children: [childSchema] }); + +const Model = mongoose.model('Model', parentSchema); + +Model.create({ children: [{ name: 'Luke' }] }, (error, doc) => { + // doc.children[0]._id will be undefined +}); +const schema = new Schema({ name: String, inventory: {} }); +const Character = mongoose.model('Character', schema); + +// will store `inventory` field if it is not empty +const frodo = new Character({ name: 'Frodo', inventory: { ringOfPower: 1 } }); +await frodo.save(); +let doc = await Character.findOne({ name: 'Frodo' }).lean(); +doc.inventory; // { ringOfPower: 1 } + +// will not store `inventory` field if it is empty +const sam = new Character({ name: 'Sam', inventory: {} }); +await sam.save(); +doc = await Character.findOne({ name: 'Sam' }).lean(); +doc.inventory; // undefined +const schema = new Schema({ name: String, inventory: {} }, { minimize: false }); +const Character = mongoose.model('Character', schema); + +// will store `inventory` if empty +const sam = new Character({ name: 'Sam', inventory: {} }); +await sam.save(); +doc = await Character.findOne({ name: 'Sam' }).lean(); +doc.inventory; // {} +const sam = new Character({ name: 'Sam', inventory: {} }); +sam.$isEmpty('inventory'); // true + +sam.inventory.barrowBlade = 1; +sam.$isEmpty('inventory'); // false +const schema = new Schema({ /* ... */ }, { read: 'primary' }); // also aliased as 'p' +const schema = new Schema({ /* ... */ }, { read: 'primaryPreferred' }); // aliased as 'pp' +const schema = new Schema({ /* ... */ }, { read: 'secondary' }); // aliased as 's' +const schema = new Schema({ /* ... */ }, { read: 'secondaryPreferred' }); // aliased as 'sp' +const schema = new Schema({ /* ... 
*/ }, { read: 'nearest' }); // aliased as 'n' +// pings the replset members periodically to track network latency +const options = { replset: { strategy: 'ping' } }; +mongoose.connect(uri, options); + +const schema = new Schema({ /* ... */ }, { read: ['nearest', { disk: 'ssd' }] }); +mongoose.model('JellyBean', schema); +const schema = new Schema({ name: String }, { + writeConcern: { + w: 'majority', + j: true, + wtimeout: 1000 + } + }); + new Schema({ /* ... */ }, { shardKey: { tag: 1, name: 1 } }); + const thingSchema = new Schema({ /* ... */ }) +const Thing = mongoose.model('Thing', thingSchema); +const thing = new Thing({ iAmNotInTheSchema: true }); +thing.save(); // iAmNotInTheSchema is not saved to the db + +// set to false.. +const thingSchema = new Schema({ /* ... */ }, { strict: false }); +const thing = new Thing({ iAmNotInTheSchema: true }); +thing.save(); // iAmNotInTheSchema is now saved to the db!! +const thingSchema = new Schema({ /* ... */ }); +const Thing = mongoose.model('Thing', thingSchema); +const thing = new Thing; +thing.set('iAmNotInTheSchema', true); +thing.save(); // iAmNotInTheSchema is not saved to the db +const Thing = mongoose.model('Thing'); +const thing = new Thing(doc, true); // enables strict mode +const thing = new Thing(doc, false); // disables strict mode +const thingSchema = new Schema({ /* ... 
*/ }); +const Thing = mongoose.model('Thing', thingSchema); +const thing = new Thing; +thing.iAmNotInTheSchema = true; +thing.save(); // iAmNotInTheSchema is never saved to the db +const mySchema = new Schema({ field: Number }, { strict: true }); +const MyModel = mongoose.model('Test', mySchema); +// Mongoose will filter out `notInSchema: 1` because `strict: true`, meaning this query will return +// _all_ documents in the 'tests' collection +MyModel.find({ notInSchema: 1 }); +// Mongoose will strip out `notInSchema` from the update if `strict` is +// not `false` +MyModel.updateMany({}, { $set: { notInSchema: 1 } }); +const mySchema = new Schema({ field: Number }, { + strict: true, + strictQuery: false // Turn off strict mode for query filters + }); + const MyModel = mongoose.model('Test', mySchema); + // Mongoose will not strip out `notInSchema: 1` because `strictQuery` is false + MyModel.find({ notInSchema: 1 }); + // Do this instead: +const docs = await MyModel.find({ name: req.query.name, age: req.query.age }).setOptions({ sanitizeFilter: true }); +// Set `strictQuery` to `true` to omit unknown fields in queries. 
+mongoose.set('strictQuery', true); +const schema = new Schema({ name: String }); +schema.path('name').get(function(v) { + return v + ' is my name'; +}); +schema.set('toJSON', { getters: true, virtuals: false }); +const M = mongoose.model('Person', schema); +const m = new M({ name: 'Max Headroom' }); +console.log(m.toObject()); // { _id: 504e0cd7dd992d9be2f20b6f, name: 'Max Headroom' } +console.log(m.toJSON()); // { _id: 504e0cd7dd992d9be2f20b6f, name: 'Max Headroom is my name' } +// since we know toJSON is called whenever a js object is stringified: +console.log(JSON.stringify(m)); // { "_id": "504e0cd7dd992d9be2f20b6f", "name": "Max Headroom is my name" } +const schema = new Schema({ name: String }); +schema.path('name').get(function(v) { + return v + ' is my name'; +}); +schema.set('toObject', { getters: true }); +const M = mongoose.model('Person', schema); +const m = new M({ name: 'Max Headroom' }); +console.log(m); // { _id: 504e0cd7dd992d9be2f20b6f, name: 'Max Headroom is my name' } +// Mongoose interprets this as 'loc is a String' +const schema = new Schema({ loc: { type: String, coordinates: [Number] } }); +const schema = new Schema({ + // Mongoose interprets this as 'loc is an object with 2 keys, type and coordinates' + loc: { type: String, coordinates: [Number] }, + // Mongoose interprets this as 'name is a String' + name: { $type: String } + }, { typeKey: '$type' }); // A '$type' key means this object is a type declaration + const schema = new Schema({ name: String }); +schema.set('validateBeforeSave', false); +schema.path('name').validate(function(value) { + return value != null; +}); +const M = mongoose.model('Person', schema); +const m = new M({ name: null }); +m.validate(function(err) { + console.log(err); // Will tell you that null is not allowed. 
+}); +m.save(); // Succeeds despite being invalid +const schema = new Schema({ name: 'string' }); +const Thing = mongoose.model('Thing', schema); +const thing = new Thing({ name: 'mongoose v3' }); +await thing.save(); // { __v: 0, name: 'mongoose v3' } + +// customized versionKey +new Schema({ /* ... */ }, { versionKey: '_somethingElse' }) +const Thing = mongoose.model('Thing', schema); +const thing = new Thing({ name: 'mongoose v3' }); +thing.save(); // { _somethingElse: 0, name: 'mongoose v3' } +// 2 copies of the same document +const doc1 = await Model.findOne({ _id }); +const doc2 = await Model.findOne({ _id }); + +// Delete first 3 comments from `doc1` +doc1.comments.splice(0, 3); +await doc1.save(); + +// The below `save()` will throw a VersionError, because you're trying to +// modify the comment at index 1, and the above `splice()` removed that +// comment. +doc2.set('comments.1.body', 'new comment'); +await doc2.save(); +new Schema({ /* ... */ }, { versionKey: false }); +const Thing = mongoose.model('Thing', schema); +const thing = new Thing({ name: 'no versioning please' }); +thing.save(); // { name: 'no versioning please' } +schema.pre('findOneAndUpdate', function() { + const update = this.getUpdate(); + if (update.__v != null) { + delete update.__v; + } + const keys = ['$set', '$setOnInsert']; + for (const key of keys) { + if (update[key] != null && update[key].__v != null) { + delete update[key].__v; + if (Object.keys(update[key]).length === 0) { + delete update[key]; + } + } + } + update.$inc = update.$inc || {}; + update.$inc.__v = 1; + }); + async function markApproved(id) { + const house = await House.findOne({ _id }); + if (house.photos.length < 2) { + throw new Error('House must have at least two photos!'); + } + + house.status = 'APPROVED'; + await house.save(); + } + const house = await House.findOne({ _id }); +if (house.photos.length < 2) { + throw new Error('House must have at least two photos!'); +} + +const house2 = await House.findOne({ 
_id }); +house2.photos = []; +await house2.save(); + +// Marks the house as 'APPROVED' even though it has 0 photos! +house.status = 'APPROVED'; +await house.save(); +const House = mongoose.model('House', Schema({ + status: String, + photos: [String] + }, { optimisticConcurrency: true })); + + const house = await House.findOne({ _id }); + if (house.photos.length < 2) { + throw new Error('House must have at least two photos!'); + } + + const house2 = await House.findOne({ _id }); + house2.photos = []; + await house2.save(); + + // Throws 'VersionError: No matching document found for id "..." version 0' + house.status = 'APPROVED'; + await house.save(); + const schema = new Schema({ + name: String + }, { collation: { locale: 'en_US', strength: 1 } }); + + const MyModel = db.model('MyModel', schema); + + MyModel.create([{ name: 'val' }, { name: 'Val' }]). + then(() => { + return MyModel.find({ name: 'val' }); + }). + then((docs) => { + // `docs` will contain both docs, because `strength: 1` means + // MongoDB will ignore case when matching. + }); + const schema = Schema({ name: String, timestamp: Date, metadata: Object }, { + timeseries: { + timeField: 'timestamp', + metaField: 'metadata', + granularity: 'hours' + }, + autoCreate: false, + expireAfterSeconds: 86400 + }); + + // `Test` collection will be a timeseries collection + const Test = db.model('Test', schema); + new Schema({ /* ... */ }, { skipVersioning: { dontVersionMe: true } }); +thing.dontVersionMe.push('hey'); +thing.save(); // version is not incremented +const thingSchema = new Schema({ /* ... 
*/ }, { timestamps: { createdAt: 'created_at' } }); +const Thing = mongoose.model('Thing', thingSchema); +const thing = new Thing(); +await thing.save(); // `created_at` & `updatedAt` will be included + +// With updates, Mongoose will add `updatedAt` to `$set` +await Thing.updateOne({}, { $set: { name: 'Test' } }); + +// If you set upsert: true, Mongoose will add `created_at` to `$setOnInsert` as well +await Thing.findOneAndUpdate({}, { $set: { name: 'Test2' } }); + +// Mongoose also adds timestamps to bulkWrite() operations +// See https://mongoosejs.com/docs/api/model.html#model_Model-bulkWrite +await Thing.bulkWrite([ + { + insertOne: { + document: { + name: 'Jean-Luc Picard', + ship: 'USS Stargazer' + // Mongoose will add `created_at` and `updatedAt` + } + } + }, + { + updateOne: { + filter: { name: 'Jean-Luc Picard' }, + update: { + $set: { + ship: 'USS Enterprise' + // Mongoose will add `updatedAt` + } + } + } + } +]); +const schema = Schema({ + createdAt: Number, + updatedAt: Number, + name: String + }, { + // Make Mongoose use Unix time (seconds since Jan 1, 1970) + timestamps: { currentTime: () => Math.floor(Date.now() / 1000) } + }); + // Add a `meta` property to all schemas +mongoose.plugin(function myPlugin(schema) { + schema.add({ meta: {} }); + }); + const schema1 = new Schema({ + name: String + }, { pluginTags: ['useMetaPlugin'] }); + + const schema2 = new Schema({ + name: String + }); + / Add a `meta` property to all schemas +mongoose.plugin(function myPlugin(schema) { + schema.add({ meta: {} }); +}, { tags: ['useMetaPlugin'] }); +const bookSchema = new Schema({ + title: 'String', + author: { type: 'ObjectId', ref: 'Person' } + }); + const Book = mongoose.model('Book', bookSchema); + + // By default, Mongoose will add `author` to the below `select()`. 
+ await Book.find().select('title').populate('author'); + + // In other words, the below query is equivalent to the above + await Book.find().select('title author').populate('author'); + const bookSchema = new Schema({ + title: 'String', + author: { type: 'ObjectId', ref: 'Person' } + }, { selectPopulatedPaths: false }); + const Book = mongoose.model('Book', bookSchema); + + // Because `selectPopulatedPaths` is false, the below doc will **not** + // contain an `author` property. + const doc = await Book.findOne().select('title').populate('author'); + const childSchema = new Schema({ name: { type: String, required: true } }); +const parentSchema = new Schema({ child: childSchema }); + +const Parent = mongoose.model('Parent', parentSchema); + +// Will contain an error for both 'child.name' _and_ 'child' +new Parent({ child: {} }).validateSync().errors; +const childSchema = new Schema({ + name: { type: String, required: true } + }, { storeSubdocValidationError: false }); // <-- set on the child schema + const parentSchema = new Schema({ child: childSchema }); + + const Parent = mongoose.model('Parent', parentSchema); + + // Will only contain an error for 'child.name' + new Parent({ child: {} }).validateSync().errors; + const schema = new Schema({ name: String }, { + autoCreate: false, + collectionOptions: { + capped: true, + max: 1000 + } + }); + const Test = mongoose.model('Test', schema); + + // Equivalent to `createCollection({ capped: true, max: 1000 })` + await Test.createCollection(); + const schema = new Schema({ name: String }, { autoSearchIndex: true }); +schema.searchIndex({ + name: 'my-index', + definition: { mappings: { dynamic: true } } +}); +// Will automatically attempt to create the `my-index` search index. 
+const Test = mongoose.model('Test', schema); +const eventSchema = new mongoose.Schema( + { name: String }, + { + readConcern: { level: 'available' } // <-- set default readConcern for all queries + } + ); + class MyClass { + myMethod() { return 42; } + static myStatic() { return 42; } + get myVirtual() { return 42; } + } + + const schema = new mongoose.Schema(); + schema.loadClass(MyClass); + + console.log(schema.methods); // { myMethod: [Function: myMethod] } + console.log(schema.statics); // { myStatic: [Function: myStatic] } + console.log(schema.virtuals); // { myVirtual: VirtualType { ... } } + const schema = new Schema({ name: String }); +schema.path('name') instanceof mongoose.SchemaType; // true +schema.path('name') instanceof mongoose.Schema.Types.String; // true +schema.path('name').instance; // 'String' +const schema = new Schema({ + name: String, + binary: Buffer, + living: Boolean, + updated: { type: Date, default: Date.now }, + age: { type: Number, min: 18, max: 65 }, + mixed: Schema.Types.Mixed, + _someId: Schema.Types.ObjectId, + decimal: Schema.Types.Decimal128, + double: Schema.Types.Double, + int32bit: Schema.Types.Int32, + array: [], + ofString: [String], + ofNumber: [Number], + ofDates: [Date], + ofBuffer: [Buffer], + ofBoolean: [Boolean], + ofMixed: [Schema.Types.Mixed], + ofObjectId: [Schema.Types.ObjectId], + ofArrays: [[]], + ofArrayOfNumbers: [[Number]], + nested: { + stuff: { type: String, lowercase: true, trim: true } + }, + map: Map, + mapOfString: { + type: Map, + of: String + } + }); + + // example use + + const Thing = mongoose.model('Thing', schema); + + const m = new Thing; + m.name = 'Statue of Liberty'; + m.age = 125; + m.updated = new Date; + m.binary = Buffer.alloc(0); + m.living = false; + m.mixed = { any: { thing: 'i want' } }; + m.markModified('mixed'); + m._someId = new mongoose.Types.ObjectId; + m.array.push(1); + m.ofString.push('strings!'); + m.ofNumber.unshift(1, 2, 3, 4); + m.ofDates.addToSet(new Date); + 
m.ofBuffer.pop(); + m.ofMixed = [1, [], 'three', { four: 5 }]; + m.nested.stuff = 'good'; + m.map = new Map([['key', 'value']]); + m.save(callback); + // 3 string SchemaTypes: 'name', 'nested.firstName', 'nested.lastName' +const schema = new Schema({ + name: { type: String }, + nested: { + firstName: { type: String }, + lastName: { type: String } + } + }); + const holdingSchema = new Schema({ + // You might expect `asset` to be an object that has 2 properties, + // but unfortunately `type` is special in Mongoose so mongoose + // interprets this schema to mean that `asset` is a string + asset: { + type: String, + ticker: String + } + }); + const holdingSchema = new Schema({ + asset: { + // Workaround to make sure Mongoose knows `asset` is an object + // and `asset.type` is a string, rather than thinking `asset` + // is a string. + type: { type: String }, + ticker: String + } + }); + const schema1 = new Schema({ + test: String // `test` is a path of type String + }); + + const schema2 = new Schema({ + // The `test` object contains the "SchemaType options" + test: { type: String } // `test` is a path of type string + }); + const schema2 = new Schema({ + test: { + type: String, + lowercase: true // Always convert `test` to lowercase + } + }); + const numberSchema = new Schema({ + integerOnly: { + type: Number, + get: v => Math.round(v), + set: v => Math.round(v), + alias: 'i' + } + }); + + const Number = mongoose.model('Number', numberSchema); + + const doc = new Number(); + doc.integerOnly = 2.001; + doc.integerOnly; // 2 + doc.i; // 2 + doc.i = 3.001; + doc.integerOnly; // 3 + doc.i; // 3 + const schema2 = new Schema({ + test: { + type: String, + index: true, + unique: true // Unique index. 
If you specify `unique: true` + // specifying `index: true` is optional if you do `unique: true` + } + }); + const schema1 = new Schema({ name: String }); // name will be cast to string +const schema2 = new Schema({ name: 'String' }); // Equivalent + +const Person = mongoose.model('Person', schema2); +new Person({ name: 42 }).name; // "42" as a string +new Person({ name: { toString: () => 42 } }).name; // "42" as a string + +// "undefined", will get a cast error if you `save()` this document +new Person({ name: { foo: 42 } }).name; +const schema1 = new Schema({ age: Number }); // age will be cast to a Number +const schema2 = new Schema({ age: 'Number' }); // Equivalent + +const Car = mongoose.model('Car', schema2); +new Car({ age: '15' }).age; // 15 as a Number +new Car({ age: true }).age; // 1 as a Number +new Car({ age: false }).age; // 0 as a Number +new Car({ age: { valueOf: () => 83 } }).age; // 83 as a Number +const Assignment = mongoose.model('Assignment', { dueDate: Date }); +const doc = await Assignment.findOne(); +doc.dueDate.setMonth(3); +await doc.save(); // THIS DOES NOT SAVE YOUR CHANGE + +doc.markModified('dueDate'); +await doc.save(); // works +const schema1 = new Schema({ binData: Buffer }); // binData will be cast to a Buffer +const schema2 = new Schema({ binData: 'Buffer' }); // Equivalent + +const Data = mongoose.model('Data', schema2); +const file1 = new Data({ binData: 'test'}); // {"type":"Buffer","data":[116,101,115,116]} +const file2 = new Data({ binData: 72987 }); // {"type":"Buffer","data":[27]} +const file4 = new Data({ binData: { type: 'Buffer', data: [1, 2, 3]}}); // {"type":"Buffer","data":[1,2,3]} +const Any = new Schema({ any: {} }); +const Any = new Schema({ any: Object }); +const Any = new Schema({ any: Schema.Types.Mixed }); +const Any = new Schema({ any: mongoose.Mixed }); +person.anything = { x: [3, 4, { y: 'changed' }] }; +person.markModified('anything'); +person.save(); // Mongoose will save changes to `anything`. 
+const mongoose = require('mongoose'); +const carSchema = new mongoose.Schema({ driver: mongoose.ObjectId }); +const Car = mongoose.model('Car', carSchema); + +const car = new Car(); +car.driver = new mongoose.Types.ObjectId(); + +typeof car.driver; // 'object' +car.driver instanceof mongoose.Types.ObjectId; // true + +car.driver.toString(); // Something like "5e1a0651741b255ddda996c4" +const M = mongoose.model('Test', new Schema({ b: Boolean })); +console.log(new M({ b: 'nay' }).b); // undefined + +// Set { false, 'false', 0, '0', 'no' } +console.log(mongoose.Schema.Types.Boolean.convertToFalse); + +mongoose.Schema.Types.Boolean.convertToFalse.add('nay'); +console.log(new M({ b: 'nay' }).b); // false +const ToySchema = new Schema({ name: String }); +const ToyBoxSchema = new Schema({ + toys: [ToySchema], + buffers: [Buffer], + strings: [String], + numbers: [Number] + // ... etc +}); +const ToyBox = mongoose.model('ToyBox', ToyBoxSchema); +console.log((new ToyBox()).toys); // [] +const ToyBoxSchema = new Schema({ + toys: { + type: [ToySchema], + default: undefined + } + }); + const Empty1 = new Schema({ any: [] }); +const Empty2 = new Schema({ any: Array }); +const Empty3 = new Schema({ any: [Schema.Types.Mixed] }); +const Empty4 = new Schema({ any: [{}] }); +const userSchema = new Schema({ + // `socialMediaHandles` is a map whose values are strings. A map's + // keys are always strings. You specify the type of values using `of`. 
+ socialMediaHandles: { + type: Map, + of: String + } + }); + + const User = mongoose.model('User', userSchema); + // Map { 'github' => 'vkarpov15', 'twitter' => '@code_barbarian' } + console.log(new User({ + socialMediaHandles: { + github: 'vkarpov15', + twitter: '@code_barbarian' + } + }).socialMediaHandles); + const user = new User({ + socialMediaHandles: {} + }); + + // Good + user.socialMediaHandles.set('github', 'vkarpov15'); + // Works too + user.set('socialMediaHandles.twitter', '@code_barbarian'); + // Bad, the `myspace` property will **not** get saved + user.socialMediaHandles.myspace = 'fail'; + + // 'vkarpov15' + console.log(user.socialMediaHandles.get('github')); + // '@code_barbarian' + console.log(user.get('socialMediaHandles.twitter')); + // undefined + user.socialMediaHandles.github; + + // Will only save the 'github' and 'twitter' properties + user.save(); + const userSchema = new Schema({ + socialMediaHandles: { + type: Map, + of: new Schema({ + handle: String, + oauth: { + type: ObjectId, + ref: 'OAuth' + } + }) + } + }); + const User = mongoose.model('User', userSchema); + const user = await User.findOne().populate('socialMediaHandles.$*.oauth'); + const authorSchema = new Schema({ + _id: Schema.Types.UUID, // Can also do `_id: 'UUID'` + name: String + }); + + const Author = mongoose.model('Author', authorSchema); + + const bookSchema = new Schema({ + authorId: { type: Schema.Types.UUID, ref: 'Author' } + }); + const Book = mongoose.model('Book', bookSchema); + + const author = new Author({ name: 'Martin Fowler' }); + console.log(typeof author._id); // 'string' + console.log(author.toObject()._id instanceof mongoose.mongo.BSON.Binary); // true + + const book = new Book({ authorId: '09190f70-3d30-11e5-8814-0f4df9a59c41' }); + const { randomUUID } = require('crypto'); + +const schema = new mongoose.Schema({ + docId: { + type: 'UUID', + default: () => randomUUID() + } +}); +const questionSchema = new Schema({ + answer: BigInt + }); + const 
Question = mongoose.model('Question', questionSchema); + + const question = new Question({ answer: 42n }); + typeof question.answer; // 'bigint' + const temperatureSchema = new Schema({ + celsius: Double + }); + const Temperature = mongoose.model('Temperature', temperatureSchema); + + const temperature = new Temperature({ celsius: 1339 }); + temperature.celsius instanceof bson.Double; // true + new Temperature({ celsius: '1.2e12' }).celsius; // 15 as a Double +new Temperature({ celsius: true }).celsius; // 1 as a Double +new Temperature({ celsius: false }).celsius; // 0 as a Double +new Temperature({ celsius: { valueOf: () => 83.0033 } }).celsius; // 83 as a Double +new Temperature({ celsius: '' }).celsius; // null +const studentSchema = new Schema({ + id: Int32 + }); + const Student = mongoose.model('Student', studentSchema); + + const student = new Student({ id: 1339 }); + typeof student.id; // 'number' + new Student({ id: '15' }).id; // 15 as a Int32 +new Student({ id: true }).id; // 1 as a Int32 +new Student({ id: false }).id; // 0 as a Int32 +new Student({ id: { valueOf: () => 83 } }).id; // 83 as a Int32 +new Student({ id: '' }).id; // null as a Int32 +const root = 'https://s3.amazonaws.com/mybucket'; + +const userSchema = new Schema({ + name: String, + picture: { + type: String, + get: v => `${root}${v}` + } +}); + +const User = mongoose.model('User', userSchema); + +const doc = new User({ name: 'Val', picture: '/123.png' }); +doc.picture; // 'https://s3.amazonaws.com/mybucket/123.png' +doc.toObject({ getters: false }).picture; // '/123.png' +const schema = new Schema({ + arr: [{ url: String }] + }); + + const root = 'https://s3.amazonaws.com/mybucket'; + + // Bad, don't do this! + schema.path('arr').get(v => { + return v.map(el => Object.assign(el, { url: root + el.url })); + }); + + // Later + doc.arr.push({ key: String }); + doc.arr[0]; // 'undefined' because every `doc.arr` creates a new array! 
+ const schema = new Schema({ + arr: [{ url: String }] + }); + + const root = 'https://s3.amazonaws.com/mybucket'; + + // Good, do this instead of declaring a getter on `arr` + schema.path('arr.0.url').get(v => `${root}${v}`); + const subSchema = new mongoose.Schema({ + // some schema definition here + }); + + const schema = new mongoose.Schema({ + data: { + type: subSchema, + default: {} + } + }); + const sampleSchema = new Schema({ name: { type: String, required: true } }); +console.log(sampleSchema.path('name')); +// Output looks like: +/** + * SchemaString { + * enumValues: [], + * regExp: null, + * path: 'name', + * instance: 'String', + * validators: ... + */ +mongoose.connect('mongodb://127.0.0.1:27017/myapp'); +const MyModel = mongoose.model('Test', new Schema({ name: String })); +// Works +await MyModel.findOne(); +const MyModel = mongoose.model('Test', new Schema({ name: String })); +const promise = MyModel.findOne(); + +setTimeout(function() { + mongoose.connect('mongodb://127.0.0.1:27017/myapp'); +}, 60000); + +// Will just hang until mongoose successfully connects +await promise; +mongoose.set('bufferCommands', false); +const schema = new Schema({ + name: String + }, { + capped: { size: 1024 }, + bufferCommands: false, + autoCreate: false // disable `autoCreate` since `bufferCommands` is false + }); + + const Model = mongoose.model('Test', schema); + // Explicitly create the collection before using it + // so the collection is capped. + await Model.createCollection(); + mongoose.connect('mongodb://127.0.0.1:27017/test'). 
+ catch(error => handleError(error)); + +// Or: +try { + await mongoose.connect('mongodb://127.0.0.1:27017/test'); +} catch (error) { + handleError(error); +} +mongoose.connection.on('error', err => { + logError(err); + }); + mongoose.connect(uri, options); + // Throws an error "getaddrinfo ENOTFOUND doesnt.exist" after 30 seconds +await mongoose.connect('mongodb://doesnt.exist:27017/test'); +mongoose.connect(uri, { + serverSelectionTimeoutMS: 5000 + }); + +// Prints "Failed 0", "Failed 1", "Failed 2" and then throws an +// error. Exits after approximately 15 seconds. +for (let i = 0; i < 3; ++i) { + try { + await mongoose.connect('mongodb://doesnt.exist:27017/test', { + serverSelectionTimeoutMS + }); + break; + } catch (err) { + console.log('Failed', i); + if (i >= 2) { + throw err; + } + } + } + mongoose.connect(uri, options, function(error) { + // Check error in initial connection. There is no 2nd param to the callback. + }); + + // Or using promises + mongoose.connect(uri, options).then( + () => { /** ready to use. The `mongoose.connect()` promise resolves to mongoose instance. 
*/ }, + err => { /** handle initial connection error */ } + ); + mongoose.connect('mongodb://127.0.0.1:27017/test?socketTimeoutMS=1000&bufferCommands=false&authSource=otherdb'); +// The above is equivalent to: +mongoose.connect('mongodb://127.0.0.1:27017/test', { + socketTimeoutMS: 1000 + // Note that mongoose will **not** pull `bufferCommands` from the query string +}); +mongoose.connection.on('connected', () => console.log('connected')); +mongoose.connection.on('open', () => console.log('open')); +mongoose.connection.on('disconnected', () => console.log('disconnected')); +mongoose.connection.on('reconnected', () => console.log('reconnected')); +mongoose.connection.on('disconnecting', () => console.log('disconnecting')); +mongoose.connection.on('close', () => console.log('close')); + +mongoose.connect('mongodb://127.0.0.1:27017/mongoose_test'); +const conn = mongoose.createConnection('mongodb://127.0.0.1:27017/mongoose_test'); + +conn.on('connected', () => console.log('connected')); +conn.on('open', () => console.log('open')); +conn.on('disconnected', () => console.log('disconnected')); +conn.on('reconnected', () => console.log('reconnected')); +conn.on('disconnecting', () => console.log('disconnecting')); +conn.on('close', () => console.log('close')); +mongoose.connect('mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]' [, options]); +mongoose.connect('mongodb://user:pw@host1.com:27017,host2.com:27017,host3.com:27017/testdb'); +mongoose.connect('mongodb://host1:port1/?replicaSet=rsName'); +mongoose.connect(uri, { + serverSelectionTimeoutMS: 5000 // Timeout after 5s instead of 30s + }); + const mongoose = require('mongoose'); + +const uri = 'mongodb+srv://username:badpw@cluster0-OMITTED.mongodb.net/' + + 'test?retryWrites=true&w=majority'; +// Prints "MongoServerError: bad auth Authentication failed." 
+mongoose.connect(uri, { + serverSelectionTimeoutMS: 5000 +}).catch(err => console.log(err.reason)); +// Can get this error even if your connection string doesn't include +// `localhost` if `rs.conf()` reports that one replica set member has +// `localhost` as its host name. +MongooseServerSelectionError: connect ECONNREFUSED localhost:27017 +if (err.name === 'MongooseServerSelectionError') { + // Contains a Map describing the state of your replica set. For example: + // Map(1) { + // 'localhost:27017' => ServerDescription { + // address: 'localhost:27017', + // type: 'Unknown', + // ... + // } + // } + console.log(err.r + // Connect to 2 mongos servers +mongoose.connect('mongodb://mongosA:27501,mongosB:27501', cb); +const conn = mongoose.createConnection('mongodb://[username:password@]host1[:port1][,host2[:port2],...[,hostN[:portN]]][/[database][?options]]', options); +const UserModel = conn.model('User', userSchema); +// `asPromise()` returns a promise that resolves to the connection +// once the connection succeeds, or rejects if connection failed. +const conn = await mongoose.createConnection(connectionString).asPromise(); +const userSchema = new Schema({ name: String, email: String }); + +// The alternative to the export model pattern is the export schema pattern. +module.exports = userSchema; + +// Because if you export a model as shown below, the model will be scoped +// to Mongoose's default connection. 
+// module.exports = mongoose.model('User', userSchema); +const mongoose = require('mongoose'); + +module.exports = function connectionFactory() { + const conn = mongoose.createConnection(process.env.MONGODB_URI); + + conn.model('User', require('../schemas/user')); + conn.model('PageView', require('../schemas/pageView')); + + return conn; +}; +// connections/index.js +const mongoose = require('mongoose'); + +const conn = mongoose.createConnection(process.env.MONGODB_URI); +conn.model('User', require('../schemas/user')); + +module.exports = conn; +// With object options +mongoose.createConnection(uri, { maxPoolSize: 10 }); + +// With connection string options +const uri = 'mongodb://127.0.0.1:27017/test?maxPoolSize=10'; +mongoose.createConnection(uri); +const express = require('express'); +const mongoose = require('mongoose'); + +mongoose.connect('mongodb://127.0.0.1:27017/main'); +mongoose.set('debug', true); + +mongoose.model('User', mongoose.Schema({ name: String })); + +const app = express(); + +app.get('/users/:tenantId', function(req, res) { + const db = mongoose.connection.useDb(`tenant_${req.params.tenantId}`, { + // `useCache` tells Mongoose to cache connections by database name, so + // `mongoose.connection.useDb('foo', { useCache: true })` returns the + // same reference each time. + useCache: true + }); + // Need to register models every time a new connection is created + if (!db.models['User']) { + db.model('User', mongoose.Schema({ name: String })); + } + console.log('Find users from', db.name); + db.model('User').find(). + then(users => res.json({ users })). 
+ catch(err => res.status(500).json({ message: err.message })); +}); + +app.listen(3000); +const express = require('express'); +const mongoose = require('mongoose'); + +const tenantIdToConnection = {}; + +const app = express(); + +app.get('/users/:tenantId', function(req, res) { + let initialConnection = Promise.resolve(); + const { tenantId } = req.params; + if (!tenantIdToConnection[tenantId]) { + tenantIdToConnection[tenantId] = mongoose.createConnection(`mongodb://127.0.0.1:27017/tenant_${tenantId}`); + tenantIdToConnection[tenantId].model('User', mongoose.Schema({ name: String })); + initialConnection = tenantIdToConnection[tenantId].asPromise(); + } + const db = tenantIdToConnection[tenantId]; + initialConnection. + then(() => db.model('User').find()). + then(users => res.json({ users })). + catch(err => res.status(500).json({ message: err.message })); +}); + +app.listen(3000); +const schema = new mongoose.Schema({ name: String, size: String }); +const Tank = mongoose.model('Tank', schema); +const Tank = mongoose.model('Tank', yourSchema); + +const small = new Tank({ size: 'small' }); +await small.save(); + +// or + +await Tank.create({ size: 'small' }); + +// or, for inserting large batches of documents +await Tank.insertMany([{ size: 'small' }]); +await mongoose.connect('mongodb://127.0.0.1/gettingstarted'); +const connection = mongoose.createConnection('mongodb://127.0.0.1:27017/test'); +const Tank = connection.model('Tank', yourSchema); +await Tank.find({ size: 'small' }).where('createdDate').gt(oneYearAgo).exec(); +await Tank.deleteOne({ size: 'large' }); +// Updated at most one doc, `res.nModified` contains the number +// of docs that MongoDB updated +await Tank.updateOne({ size: 'large' }, { name: 'T-90' }); +async function run() { + // Create a new mongoose model + const personSchema = new mongoose.Schema({ + name: String + }); + const Person = mongoose.model('Person', personSchema); + + // Create a change stream. 
The 'change' event gets emitted when there's a + // change in the database + Person.watch(). + on('change', data => console.log(new Date(), data)); + + // Insert a doc, will trigger the change stream handler above + console.log(new Date(), 'Inserting doc'); + await Person.create({ name: 'Axl Rose' }); + } + // Make sure to disable `autoCreate` and `autoIndex` for Views, +// because you want to create the collection manually. +const userSchema = new Schema({ + name: String, + email: String, + roles: [String] + }, { autoCreate: false, autoIndex: false }); + const User = mongoose.model('User', userSchema); + + const RedactedUser = mongoose.model('RedactedUser', userSchema); + + // First, create the User model's underlying collection... + await User.createCollection(); + // Then create the `RedactedUser` model's underlying collection + // as a View. + await RedactedUser.createCollection({ + viewOn: 'users', // Set `viewOn` to the collection name, **not** model name. + pipeline: [ + { + $set: { + name: { $concat: [{ $substr: ['$name', 0, 3] }, '...'] }, + email: { $concat: [{ $substr: ['$email', 0, 3] }, '...'] } + } + } + ] + }); + + await User.create([ + { name: 'John Smith', email: 'john.smith@gmail.com', roles: ['user'] }, + { name: 'Bill James', email: 'bill@acme.co', roles: ['user', 'admin'] } + ]); + + // [{ _id: ..., name: 'Bil...', email: 'bil...', roles: ['user', 'admin'] }] + console.log(await RedactedUser.find({ roles: 'admin' })); + const MyModel = mongoose.model('Test', new Schema({ name: String })); +const doc = new MyModel(); + +doc instanceof MyModel; // true +doc instanceof mongoose.Model; // true +doc instanceof mongoose.Document; // true +const doc = await MyModel.findOne(); + +doc instanceof MyModel; // true +doc instanceof mongoose.Model; // true +doc instanceof mongoose.Document; // true +doc.name = 'foo'; + +// Mongoose sends an `updateOne({ _id: doc._id }, { $set: { name: 'foo' } })` +// to MongoDB. 
+await doc.save(); +doc.save().then(savedDoc => { + savedDoc === doc; // true + }); + const doc = await MyModel.findOne(); + +// Delete the document so Mongoose won't be able to save changes +await MyModel.deleteOne({ _id: doc._id }); + +doc.name = 'foo'; +await doc.save(); // Throws DocumentNotFoundError +const schema = new Schema({ + nested: { + subdoc: new Schema({ + name: String + }) + } + }); + const TestModel = mongoose.model('Test', schema); + + const doc = new TestModel(); + doc.set('nested.subdoc.name', 'John Smith'); + doc.nested.subdoc.name; // 'John Smith' + const doc2 = new TestModel(); + +doc2.get('nested.subdoc.name'); // undefined +doc2.nested?.subdoc?.name; // undefined + +doc2.set('nested.subdoc.name', 'Will Smith'); +doc2.get('nested.subdoc.name'); // 'Will Smith' +// The following works fine +const doc3 = new TestModel(); +doc3.nested.subdoc ??= {}; +doc3.nested.subdoc.name = 'John Smythe'; + +// The following does **NOT** work. +// Do not use the following pattern with Mongoose documents. +const doc4 = new TestModel(); +(doc4.nested.subdoc ??= {}).name = 'Charlie Smith'; +doc.nested.subdoc; // Empty object +doc.nested.subdoc.name; // undefined. +// Update all documents in the `mymodels` collection +await MyModel.updateMany({}, { $set: { name: 'foo' } }); +const schema = new Schema({ name: String, age: { type: Number, min: 0 } }); +const Person = mongoose.model('Person', schema); + +const p = new Person({ name: 'foo', age: 'bar' }); +// Cast to Number failed for value "bar" at path "age" +await p.validate(); + +const p2 = new Person({ name: 'foo', age: -1 }); +// Path `age` (-1) is less than minimum allowed value (0). +await p2.validate(); +// Cast to number failed for value "bar" at path "age" +await Person.updateOne({}, { age: 'bar' }); + +// Path `age` (-1) is less than minimum allowed value (0). 
+await Person.updateOne({}, { age: -1 }, { runValidators: true }); +const doc = await Person.findOne({ _id }); + +// Sets `name` and unsets all other properties +doc.overwrite({ name: 'Jean-Luc Picard' }); +await doc.save(); +// Sets `name` and unsets all other properties +await Person.replaceOne({ _id }, { name: 'Jean-Luc Picard' }); +const childSchema = new Schema({ name: 'string' }); + +const parentSchema = new Schema({ + // Array of subdocuments + children: [childSchema], + // Single nested subdocuments + child: childSchema +}); +const childSchema = new Schema({ name: 'string' }); +const Child = mongoose.model('Child', childSchema); + +const parentSchema = new Schema({ + child: { + type: mongoose.ObjectId, + ref: 'Child' + } +}); +const Parent = mongoose.model('Parent', parentSchema); + +const doc = await Parent.findOne().populate('child'); +// NOT a subdocument. `doc.child` is a separate top-level document. +doc.child; +const Parent = mongoose.model('Parent', parentSchema); +const parent = new Parent({ children: [{ name: 'Matt' }, { name: 'Sarah' }] }); +parent.children[0].name = 'Matthew'; + +// `parent.children[0].save()` is a no-op, it triggers middleware but +// does **not** actually save the subdocument. You need to save the parent +// doc. 
+await parent.save(); +childSchema.pre('save', function(next) { + if ('invalid' == this.name) { + return next(new Error('#sadpanda')); + } + next(); + }); + + const parent = new Parent({ children: [{ name: 'invalid' }] }); + try { + await parent.save(); + } catch (err) { + err.message; // '#sadpanda' + } + // Below code will print out 1-4 in order +const childSchema = new mongoose.Schema({ name: 'string' }); + +childSchema.pre('validate', function(next) { + console.log('2'); + next(); +}); + +childSchema.pre('save', function(next) { + console.log('3'); + next(); +}); + +const parentSchema = new mongoose.Schema({ + child: childSchema +}); + +parentSchema.pre('validate', function(next) { + console.log('1'); + next(); +}); + +parentSchema.pre('save', function(next) { + console.log('4'); + next(); +}); +// Subdocument +const subdocumentSchema = new mongoose.Schema({ + child: new mongoose.Schema({ name: String, age: Number }) + }); + const Subdoc = mongoose.model('Subdoc', subdocumentSchema); + + // Nested path + const nestedSchema = new mongoose.Schema({ + child: { name: String, age: Number } + }); + const Nested = mongoose.model('Nested', nestedSchema); + const doc1 = new Subdoc({}); +doc1.child === undefined; // true +doc1.child.name = 'test'; // Throws TypeError: cannot read property... + +const doc2 = new Nested({}); +doc2.child === undefined; // false +console.log(doc2.child); // Prints 'MongooseDocument { undefined }' +doc2.child.name = 'test'; // Works +const subdocumentSchema = new mongoose.Schema({ + child: new mongoose.Schema({ + name: String, + age: { + type: Number, + default: 0 + } + }) + }); + const Subdoc = mongoose.model('Subdoc', subdocumentSchema); + + // Note that the `age` default has no effect, because `child` + // is `undefined`. 
+ const doc = new Subdoc(); + doc.child; // undefined + doc.child = {}; +// Mongoose applies the `age` default: +doc.child.age; // 0 +const childSchema = new mongoose.Schema({ + name: String, + age: { + type: Number, + default: 0 + } + }); + const subdocumentSchema = new mongoose.Schema({ + child: { + type: childSchema, + default: () => ({}) + } + }); + const Subdoc = mongoose.model('Subdoc', subdocumentSchema); + + // Note that Mongoose sets `age` to its default value 0, because + // `child` defaults to an empty object and Mongoose applies + // defaults to that empty object. + const doc = new Subdoc(); + doc.child; // { age: 0 } + const doc = parent.children.id(_id); + const Parent = mongoose.model('Parent'); +const parent = new Parent(); + +// create a comment +parent.children.push({ name: 'Liesl' }); +const subdoc = parent.children[0]; +console.log(subdoc); // { _id: '501d86090d371bab2c0341c5', name: 'Liesl' } +subdoc.isNew; // true + +await parent.save(); +console.log('Success!'); +const newdoc = parent.children.create({ name: 'Aaron' }); +// Equivalent to `parent.children.pull(_id)` +parent.children.id(_id).deleteOne(); +// Equivalent to `parent.child = null` +parent.child.deleteOne(); + +await parent.save(); +console.log('the subdocs were removed'); +const schema = new Schema({ + docArr: [{ name: String }], + singleNested: new Schema({ name: String }) + }); + const Model = mongoose.model('Test', schema); + + const doc = new Model({ + docArr: [{ name: 'foo' }], + singleNested: { name: 'bar' } + }); + + doc.singleNested.parent() === doc; // true + doc.docArr[0].parent() === doc; // true + const schema = new Schema({ + level1: new Schema({ + level2: new Schema({ + test: String + }) + }) + }); + const Model = mongoose.model('Test', schema); + + const doc = new Model({ level1: { level2: 'test' } }); + + doc.level1.level2.parent() === doc; // false + doc.level1.level2.parent() === doc.level1; // true + doc.level1.level2.ownerDocument() === doc; // true + const 
parentSchema = new Schema({ + children: [{ name: 'string' }] + }); + // Equivalent + const parentSchema = new Schema({ + children: [new Schema({ name: 'string' })] + }); + + const Person = mongoose.model('Person', yourSchema); + +// find each person with a last name matching 'Ghost', selecting the `name` and `occupation` fields +const person = await Person.findOne({ 'name.last': 'Ghost' }, 'name occupation'); +// Prints "Space Ghost is a talk show host". +console.log('%s %s is a %s.', person.name.first, person.name.last, person.occupation); +// find each person with a last name matching 'Ghost' +const query = Person.findOne({ 'name.last': 'Ghost' }); + +// selecting the `name` and `occupation` fields +query.select('name occupation'); + +// execute the query at a later time +const person = await query.exec(); +// Prints "Space Ghost is a talk show host." +console.log('%s %s is a %s.', person.name.first, person.name.last, person.occupation); +// With a JSON doc +await Person. + find({ + occupation: /host/, + 'name.last': 'Ghost', + age: { $gt: 17, $lt: 66 }, + likes: { $in: ['vaporizing', 'talking'] } + }). + limit(10). + sort({ occupation: -1 }). + select({ name: 1, occupation: 1 }). + exec(); + +// Using query builder +await Person. + find({ occupation: /host/ }). + where('name.last').equals('Ghost'). + where('age').gt(17).lt(66). + where('likes').in(['vaporizing', 'talking']). + limit(10). + sort('-occupation'). + select('name occupation'). 
+ exec(); + const q = MyModel.updateMany({}, { isDeleted: true }); + +await q.then(() => console.log('Update 2')); +// Throws "Query was already executed: Test.updateMany({}, { isDeleted: true })" +await q.then(() => console.log('Update 3')); +const cursor = Person.find({ occupation: /host/ }).cursor(); + +for (let doc = await cursor.next(); doc != null; doc = await cursor.next()) { + console.log(doc); // Prints documents one at a time + for await (const doc of Person.find()) { + console.log(doc); // Prints documents one at a time + } + // MongoDB won't automatically close this cursor after 10 minutes. +const cursor = Person.find().cursor().addCursorFlag('noCursorTimeout', true); +const docs = await Person.aggregate([{ $match: { 'name.last': 'Ghost' } }]); +const docs = await Person.aggregate([{ $match: { 'name.last': 'Ghost' } }]); + +docs[0] instanceof mongoose.Document; // false +const doc = await Person.findOne(); + +const idString = doc._id.toString(); + +// Finds the `Person`, because Mongoose casts `idString` to an ObjectId +const queryRes = await Person.findOne({ _id: idString }); + +// Does **not** find the `Person`, because Mongoose doesn't cast aggregation +// pipelines. 
+const aggRes = await Person.aggregate([{ $match: { _id: idString } }]); +const personSchema = new mongoose.Schema({ + age: Number +}); + +const Person = mongoose.model('Person', personSchema); +for (let i = 0; i < 10; i++) { + await Person.create({ age: i }); +} + +await Person.find().sort({ age: -1 }); // returns age starting from 10 as the first entry +await Person.find().sort({ age: 1 }); // returns age starting from 0 as the first entry +const personSchema = new mongoose.Schema({ + age: Number, + name: String, + weight: Number +}); + +const Person = mongoose.model('Person', personSchema); +const iterations = 5; +for (let i = 0; i < iterations; i++) { + await Person.create({ + age: Math.abs(2 - i), + name: 'Test' + i, + weight: Math.floor(Math.random() * 100) + 1 + }); +} + +await Person.find().sort({ age: 1, weight: -1 }); // returns age starting from 0, but while keeping that order will then sort by weight. +[ + { + _id: new ObjectId('63a335a6b9b6a7bfc186cb37'), + age: 0, + name: 'Test2', + weight: 67, + __v: 0 + }, + { + _id: new ObjectId('63a335a6b9b6a7bfc186cb35'), + age: 1, + name: 'Test1', + weight: 99, + __v: 0 + }, + { + _id: new ObjectId('63a335a6b9b6a7bfc186cb39'), + age: 1, + name: 'Test3', + weight: 73, + __v: 0 + }, + { + _id: new ObjectId('63a335a6b9b6a7bfc186cb33'), + age: 2, + name: 'Test0', + weight: 65, + __v: 0 + }, + { + _id: new ObjectId('63a335a6b9b6a7bfc186cb3b'), + age: 2, + name: 'Test4', + weight: 62, + __v: 0 + } +]; +const schema = new Schema({ + name: { + type: String, + required: true + } +}); +const Cat = db.model('Cat', schema); + +// This cat has no name :( +const cat = new Cat(); + +let error; +try { + await cat.save(); +} catch (err) { + error = err; +} + +assert.equal(error.errors['name'].message, + 'Path `name` is required.'); + +error = cat.validateSync(); +assert.equal(error.errors['name'].message, + 'Path `name` is required.'); + const breakfastSchema = new Schema({ + eggs: { + type: Number, + min: [6, 'Too few eggs'], + 
max: 12 + }, + bacon: { + type: Number, + required: [true, 'Why no bacon?'] + }, + drink: { + type: String, + enum: ['Coffee', 'Tea'], + required: function() { + return this.bacon > 3; + } + } + }); + const Breakfast = db.model('Breakfast', breakfastSchema); + + const badBreakfast = new Breakfast({ + eggs: 2, + bacon: 0, + drink: 'Milk' + }); + let error = badBreakfast.validateSync(); + assert.equal(error.errors['eggs'].message, + 'Too few eggs'); + assert.ok(!error.errors['bacon']); + assert.equal(error.errors['drink'].message, + '`Milk` is not a valid enum value for path `drink`.'); + + badBreakfast.bacon = 5; + badBreakfast.drink = null; + + error = badBreakfast.validateSync(); + assert.equal(error.errors['drink'].message, 'Path `drink` is required.'); + + badBreakfast.bacon = null; + error = badBreakfast.validateSync(); + assert.equal(error.errors['bacon'].message, 'Why no bacon?'); + const breakfastSchema = new Schema({ + eggs: { + type: Number, + min: [6, 'Must be at least 6, got {VALUE}'], + max: 12 + }, + drink: { + type: String, + enum: { + values: ['Coffee', 'Tea'], + message: '{VALUE} is not supported' + } + } + }); + const Breakfast = db.model('Breakfast', breakfastSchema); + + const badBreakfast = new Breakfast({ + eggs: 2, + drink: 'Milk' + }); + const error = badBreakfast.validateSync(); + assert.equal(error.errors['eggs'].message, + 'Must be at least 6, got 2'); + assert.equal(error.errors['drink'].message, 'Milk is not supported'); + const uniqueUsernameSchema = new Schema({ + username: { + type: String, + unique: true + } + }); + const U1 = db.model('U1', uniqueUsernameSchema); + const U2 = db.model('U2', uniqueUsernameSchema); + + const dup = [{ username: 'Val' }, { username: 'Val' }]; + // Race condition! This may save successfully, depending on whether + // MongoDB built the index before writing the 2 docs. + U1.create(dup). + then(() => { + }). 
+ catch(err => { + }); + + // You need to wait for Mongoose to finish building the `unique` + // index before writing. You only need to build indexes once for + // a given collection, so you normally don't need to do this + // in production. But, if you drop the database between tests, + // you will need to use `init()` to wait for the index build to finish. + U2.init(). + then(() => U2.create(dup)). + catch(error => { + // `U2.create()` will error, but will *not* be a mongoose validation error, it will be + // a duplicate key error. + // See: https://masteringjs.io/tutorials/mongoose/e11000-duplicate-key + assert.ok(error); + assert.ok(!error.errors); + assert.ok(error.message.indexOf('duplicate key error') !== -1); + }); + const userSchema = new Schema({ + phone: { + type: String, + validate: { + validator: function(v) { + return /\d{3}-\d{3}-\d{4}/.test(v); + }, + message: props => `${props.value} is not a valid phone number!` + }, + required: [true, 'User phone number required'] + } + }); + + const User = db.model('user', userSchema); + const user = new User(); + let error; + + user.phone = '555.0123'; + error = user.validateSync(); + assert.equal(error.errors['phone'].message, + '555.0123 is not a valid phone number!'); + + user.phone = ''; + error = user.validateSync(); + assert.equal(error.errors['phone'].message, + 'User phone number required'); + + user.phone = '201-555-0123'; + // Validation succeeds! Phone number is defined + // and fits `DDD-DDD-DDDD` + error = user.validateSync(); + assert.equal(error, null); + const userSchema = new Schema({ + name: { + type: String, + // You can also make a validator async by returning a promise. + validate: () => Promise.reject(new Error('Oops!')) + }, + email: { + type: String, + // There are two ways for an promise-based async validator to fail: + // 1) If the promise rejects, Mongoose assumes the validator failed with the given error. 
+ // 2) If the promise resolves to `false`, Mongoose assumes the validator failed and creates an error with the given `message`. + validate: { + validator: () => Promise.resolve(false), + message: 'Email validation failed' + } + } + }); + + const User = db.model('User', userSchema); + const user = new User(); + + user.email = 'test@test.co'; + user.name = 'test'; + + let error; + try { + await user.validate(); + } catch (err) { + error = err; + } + assert.ok(error); + assert.equal(error.errors['name'].message, 'Oops!'); + assert.equal(error.errors['email'].message, 'Email validation failed'); + const toySchema = new Schema({ + color: String, + name: String + }); + + const validator = function(value) { + return /red|white|gold/i.test(value); + }; + toySchema.path('color').validate(validator, + 'Color `{VALUE}` not valid', 'Invalid color'); + toySchema.path('name').validate(function(v) { + if (v !== 'Turbo Man') { + throw new Error('Need to get a Turbo Man for Christmas'); + } + return true; + }, 'Name `{VALUE}` is not valid'); + + const Toy = db.model('Toy', toySchema); + + const toy = new Toy({ color: 'Green', name: 'Power Ranger' }); + + let error; + try { + await toy.save(); + } catch (err) { + error = err; + } + + // `error` is a ValidationError object + // `error.errors.color` is a ValidatorError object + assert.equal(error.errors.color.message, 'Color `Green` not valid'); + assert.equal(error.errors.color.kind, 'Invalid color'); + assert.equal(error.errors.color.path, 'color'); + assert.equal(error.errors.color.value, 'Green'); + + // If your validator throws an exception, mongoose will use the error + // message. If your validator returns `false`, + // mongoose will use the 'Name `Power Ranger` is not valid' message. 
+ assert.equal(error.errors.name.message, + 'Need to get a Turbo Man for Christmas'); + assert.equal(error.errors.name.value, 'Power Ranger'); + // If your validator threw an error, the `reason` property will contain + // the original error thrown, including the original stack trace. + assert.equal(error.errors.name.reason.message, + 'Need to get a Turbo Man for Christmas'); + + assert.equal(error.name, 'ValidationError'); + const vehicleSchema = new mongoose.Schema({ + numWheels: { type: Number, max: 18 } + }); + const Vehicle = db.model('Vehicle', vehicleSchema); + + const doc = new Vehicle({ numWheels: 'not a number' }); + const err = doc.validateSync(); + + err.errors['numWheels'].name; // 'CastError' + // 'Cast to Number failed for value "not a number" at path "numWheels"' + err.errors['numWheels'].message; + const vehicleSchema = new mongoose.Schema({ + numWheels: { type: Number, max: 18 } + }); + const Vehicle = db.model('Vehicle', vehicleSchema); + + const doc = new Vehicle({ numWheels: 'not a number' }); + const err = doc.validateSync(); + + err.errors['numWheels'].name; // 'CastError' + // 'Cast to Number failed for value "not a number" at path "numWheels"' + err.errors['numWheels'].message; + const vehicleSchema = new mongoose.Schema({ + numWheels: { + type: Number, + cast: [null, (value, path, model, kind) => `"${value}" is not a number`] + } + }); + const Vehicle = db.model('Vehicle', vehicleSchema); + + const doc = new Vehicle({ numWheels: 'pie' }); + const err = doc.validateSync(); + + err.errors['numWheels'].name; // 'CastError' + // "pie" is not a number + err.errors['numWheels'].message; + // Add a custom validator to all strings +mongoose.Schema.Types.String.set('validate', v => v == null || v > 0); + +const userSchema = new Schema({ + name: String, + email: String +}); +const User = db.model('User', userSchema); + +const user = new User({ name: '', email: '' }); + +const err = await user.validate().then(() => null, err => err); 
+err.errors['name']; // ValidatorError +err.errors['email']; // ValidatorError +let personSchema = new Schema({ + name: { + first: String, + last: String + } +}); + +assert.throws(function() { + // This throws an error, because 'name' isn't a full fledged path + personSchema.path('name').required(true); +}, /Cannot.*'required'/); + +// To make a nested object required, use a single nested schema +const nameSchema = new Schema({ + first: String, + last: String +}); + +personSchema = new Schema({ + name: { + type: nameSchema, + required: true + } +}); + +const Person = db.model('Person', personSchema); + +const person = new Person(); +const error = person.validateSync(); +assert.ok(error.errors['name']); +const toySchema = new Schema({ + color: String, + name: String +}); + +const Toy = db.model('Toys', toySchema); + +Toy.schema.path('color').validate(function(value) { + return /red|green|blue/i.test(value); +}, 'Invalid color'); + +const opts = { runValidators: true }; + +let error; +try { + await Toy.updateOne({}, { color: 'not a color' }, opts); +} catch (err) { + error = err; +} + +assert.equal(error.errors.color.message, 'Invalid color'); +const toySchema = new Schema({ + color: String, + name: String +}); + +toySchema.path('color').validate(function(value) { + // When running in `validate()` or `validateSync()`, the + // validator can access the document using `this`. + // When running with update validators, `this` is the Query, + // **not** the document being updated! + // Queries have a `get()` method that lets you get the + // updated value. 
+ if (this.get('name') && this.get('name').toLowerCase().indexOf('red') !== -1) { + return value === 'red'; + } + return true; +}); + +const Toy = db.model('ActionFigure', toySchema); + +const toy = new Toy({ color: 'green', name: 'Red Power Ranger' }); +// Validation failed: color: Validator failed for path `color` with value `green` +let error = toy.validateSync(); +assert.ok(error.errors['color']); + +const update = { color: 'green', name: 'Red Power Ranger' }; +const opts = { runValidators: true }; + +error = null; +try { + await Toy.updateOne({}, update, opts); +} catch (err) { + error = err; +} +// Validation failed: color: Validator failed for path `color` with value `green` +assert.ok(error); +const kittenSchema = new Schema({ + name: { type: String, required: true }, + age: Number +}); + +const Kitten = db.model('Kitten', kittenSchema); + +const update = { color: 'blue' }; +const opts = { runValidators: true }; +// Operation succeeds despite the fact that 'name' is not specified +await Kitten.updateOne({}, update, opts); + +const unset = { $unset: { name: 1 } }; +// Operation fails because 'name' is required +const err = await Kitten.updateOne({}, unset, opts).then(() => null, err => err); +assert.ok(err); +assert.ok(err.errors['name']); +const testSchema = new Schema({ + number: { type: Number, max: 0 }, + arr: [{ message: { type: String, maxlength: 10 } }] +}); + +// Update validators won't check this, so you can still `$push` 2 elements +// onto the array, so long as they don't have a `message` that's too long. +testSchema.path('arr').validate(function(v) { + return v.length < 2; +}); + +const Test = db.model('Test', testSchema); + +let update = { $inc: { number: 1 } }; +const opts = { runValidators: true }; + +// There will never be a validation error here +await Test.updateOne({}, update, opts); + +// This will never error either even though the array will have at +// least 2 elements. 
+update = { $push: [{ message: 'hello' }, { message: 'world' }] }; +await Test.updateOne({}, update, opts); +const childSchema = new mongoose.Schema({ + name: String +}); + +const mainSchema = new mongoose.Schema({ + child: [childSchema] +}); + +mainSchema.pre('findOneAndUpdate', function() { + console.log('Middleware on parent document'); // Will be executed +}); + +childSchema.pre('findOneAndUpdate', function() { + console.log('Middleware on subdocument'); // Will not be executed +}); +const childSchema = new mongoose.Schema({ + name: String +}); + +const mainSchema = new mongoose.Schema({ + child: [childSchema] +}); + +mainSchema.pre('findOneAndUpdate', function() { + console.log('Middleware on parent document'); // Will be executed +}); + +childSchema.pre('findOneAndUpdate', function() { + console.log('Middleware on subdocument'); // Will not be executed +}); +schema.pre('save', function() { + return doStuff(). + then(() => doMoreStuff()); +}); + +// Or, using async functions +schema.pre('save', async function() { + await doStuff(); + await doMoreStuff(); +}); +const schema = new Schema({ /* ... */ }); +schema.pre('save', function(next) { + if (foo()) { + console.log('calling next!'); + // `return next();` will make sure the rest of this function doesn't run + /* return */ next(); + } + // Unless you comment out the `return` above, 'after next' will print + console.log('after next'); +}); +schema.pre('save', function(next) { + const err = new Error('something went wrong'); + // If you call `next()` with an argument, that argument is assumed to be + // an error. 
+ next(err); +}); + +schema.pre('save', function() { + // You can also return a promise that rejects + return new Promise((resolve, reject) => { + reject(new Error('something went wrong')); + }); +}); + +schema.pre('save', function() { + // You can also throw a synchronous error + throw new Error('something went wrong'); +}); + +schema.pre('save', async function() { + await Promise.resolve(); + // You can also throw an error in an `async` function + throw new Error('something went wrong'); +}); + +// later... + +// Changes will not be persisted to MongoDB because a pre hook errored out +myDoc.save(function(err) { + console.log(err.message); // something went wrong +}); +schema.post('init', function(doc) { + console.log('%s has been initialized from the db', doc._id); +}); +schema.post('validate', function(doc) { + console.log('%s has been validated (but not saved yet)', doc._id); +}); +schema.post('save', function(doc) { + console.log('%s has been saved', doc._id); +}); +schema.post('deleteOne', function(doc) { + console.log('%s has been deleted', doc._id); +}); +// Takes 2 parameters: this is an asynchronous post hook +schema.post('save', function(doc, next) { + setTimeout(function() { + console.log('post1'); + // Kick off the second post hook + next(); + }, 10); +}); + +// Will not execute until the first middleware calls `next()` +schema.post('save', function(doc, next) { + console.log('post2'); + next(); +}); +schema.post('save', async function(doc) { + await new Promise(resolve => setTimeout(resolve, 1000)); + console.log('post1'); + // If less than 2 parameters, no need to call `next()` +}); + +schema.post('save', async function(doc, next) { + await new Promise(resolve => setTimeout(resolve, 1000)); + console.log('post1'); + // If there's a `next` parameter, you need to call `next()`. 
+ next(); +}); +const schema = new mongoose.Schema({ name: String }); + +// Compile a model from the schema +const User = mongoose.model('User', schema); + +// Mongoose will **not** call the middleware function, because +// this middleware was defined after the model was compiled +schema.pre('save', () => console.log('Hello from pre save')); + +const user = new User({ name: 'test' }); +user.save(); +const schema = new mongoose.Schema({ name: String }); +// Mongoose will call this middleware function, because this script adds +// the middleware to the schema before compiling the model. +schema.pre('save', () => console.log('Hello from pre save')); + +// Compile a model from the schema +const User = mongoose.model('User', schema); + +const user = new User({ name: 'test' }); +user.save(); +const schema = new mongoose.Schema({ name: String }); + +// Once you `require()` this file, you can no longer add any middleware +// to this schema. +module.exports = mongoose.model('User', schema); +schema.pre('validate', function() { + console.log('this gets printed first'); +}); +schema.post('validate', function() { + console.log('this gets printed second'); +}); +schema.pre('save', function() { + console.log('this gets printed third'); +}); +schema.post('save', function() { + console.log('this gets printed fourth'); +}); +const userSchema = new Schema({ name: String, age: Number }); +userSchema.pre('findOneAndUpdate', function() { + console.log(this.getFilter()); // { name: 'John' } + console.log(this.getUpdate()); // { age: 30 } +}); +const User = mongoose.model('User', userSchema); + +await User.findOneAndUpdate({ name: 'John' }, { $set: { age: 30 } }); +const userSchema = new Schema({ name: String, age: Number }); +userSchema.pre('save', function(next, options) { + options.validateModifiedOnly; // true + + // Remember to call `next()` unless you're using an async function or returning a promise + next(); +}); +const User = mongoose.model('User', userSchema); + +const doc = 
new User({ name: 'John', age: 30 }); +await doc.save({ validateModifiedOnly: true }); +schema.pre('deleteOne', function() { console.log('Removing!'); }); + +// Does **not** print "Removing!". Document middleware for `deleteOne` is not executed by default +await doc.deleteOne(); + +// Prints "Removing!" +await Model.deleteOne(); +// Only document middleware +schema.pre('deleteOne', { document: true, query: false }, function() { + console.log('Deleting doc!'); +}); + +// Only query middleware. This will get called when you do `Model.deleteOne()` +// but not `doc.deleteOne()`. +schema.pre('deleteOne', { query: true, document: false }, function() { + console.log('Deleting!'); + const schema = new mongoose.Schema({ name: String }); +schema.pre('validate', function() { + console.log('Document validate'); +}); +schema.pre('validate', { query: true, document: false }, function() { + console.log('Query validate'); +}); +const Test = mongoose.model('Test', schema); + +const doc = new Test({ name: 'foo' }); + +// Prints "Document validate" +await doc.validate(); + +// Prints "Query validate" +await Test.find().validate(); +schema.pre('find', function() { + console.log(this instanceof mongoose.Query); // true + this.start = Date.now(); +}); + +schema.post('find', function(result) { + console.log(this instanceof mongoose.Query); // true + // prints returned documents + console.log('find() returned ' + JSON.stringify(result)); + // prints number of milliseconds the query took + console.log('find() took ' + (Date.now() - this.start) + ' milliseconds'); +}); +schema.pre('updateOne', function() { + this.set({ updatedAt: new Date() }); +}); +schema.pre('findOneAndUpdate', async function() { + const docToUpdate = await this.model.findOne(this.getQuery()); + console.log(docToUpdate); // The document that `findOneAndUpdate()` will modify +}); +schema.pre('updateOne', { document: true, query: false }, function() { + console.log('Updating'); +}); +const Model = mongoose.model('Test', 
schema); + +const doc = new Model(); +await doc.updateOne({ $set: { name: 'test' } }); // Prints "Updating" + +// Doesn't print "Updating", because `Query#updateOne()` doesn't fire +// document middleware. +await Model.updateOne({}, { $set: { name: 'test' } }); +const schema = new Schema({ + name: { + type: String, + // Will trigger a MongoServerError with code 11000 when + // you save a duplicate + unique: true + } +}); + +// Handler **must** take 3 parameters: the error that occurred, the document +// in question, and the `next()` function +schema.post('save', function(error, doc, next) { + if (error.name === 'MongoServerError' && error.code === 11000) { + next(new Error('There was a duplicate key error')); + } else { + next(); + } +}); + +// Will trigger the `post('save')` error handler +Person.create([{ name: 'Axl Rose' }, { name: 'Axl Rose' }]); +// The same E11000 error can occur when you call `updateOne()` +// This function **must** take 4 parameters. + +schema.post('updateOne', function(passRawResult, error, res, next) { + if (error.name === 'MongoServerError' && error.code === 11000) { + next(new Error('There was a duplicate key error')); + } else { + next(); // The `updateOne()` call will still error out. + } +}); + +const people = [{ name: 'Axl Rose' }, { name: 'Slash' }]; +await Person.create(people); + +// Throws "There was a duplicate key error" +await Person.updateOne({ name: 'Slash' }, { $set: { name: 'Axl Rose' } }); +customerSchema.pre('aggregate', function() { + // Add a $match state to the beginning of each pipeline. 
+ this.pipeline().unshift({ $match: { isDeleted: { $ne: true } } }); +}); +[require:post init hooks.*success] +[require:post init hooks.*error] +const mongoose = require('mongoose'); +const { Schema } = mongoose; + +const personSchema = Schema({ + _id: Schema.Types.ObjectId, + name: String, + age: Number, + stories: [{ type: Schema.Types.ObjectId, ref: 'Story' }] +}); + +const storySchema = Schema({ + author: { type: Schema.Types.ObjectId, ref: 'Person' }, + title: String, + fans: [{ type: Schema.Types.ObjectId, ref: 'Person' }] +}); + +const Story = mongoose.model('Story', storySchema); +const Person = mongoose.model('Person', personSchema); +const author = new Person({ + _id: new mongoose.Types.ObjectId(), + name: 'Ian Fleming', + age: 50 +}); + +await author.save(); + +const story1 = new Story({ + title: 'Casino Royale', + author: author._id // assign the _id from the person +}); + +await story1.save(); +// that's it! +const story = await Story. + findOne({ title: 'Casino Royale' }). + populate('author'). 
+ exec(); +// prints "The author is Ian Fleming" +console.log('The author is %s', story.author.name); +const story = await Story.findOne({ title: 'Casino Royale' }); +story.author = author; +console.log(story.author.name); // prints "Ian Fleming" +const fan1 = await Person.create({ name: 'Sean' }); +await Story.updateOne({ title: 'Casino Royale' }, { $push: { fans: { $each: [fan1._id] } } }); + +const story = await Story.findOne({ title: 'Casino Royale' }).populate('fans'); +story.fans[0].name; // 'Sean' + +const fan2 = await Person.create({ name: 'George' }); +story.fans.push(fan2); +story.fans[1].name; // 'George' + +story.fans.push({ name: 'Roger' }); +story.fans[2].name; // 'Roger' +const fan4 = await Person.create({ name: 'Timothy' }); +story.fans.push(fan4._id); // Push the `_id`, not the full document + +story.fans[0].name; // undefined, `fans[0]` is now an ObjectId +story.fans[0].toString() === fan1._id.toString(); // true +story.populated('author'); // truthy + +story.depopulate('author'); // Make `author` not populated anymore +story.populated('author'); // undefined +story.populated('author'); // truthy +story.author._id; // ObjectId + +story.depopulate('author'); // Make `author` not populated anymore +story.populated('author'); // undefined + +story.author instanceof ObjectId; // true +story.author._id; // ObjectId, because Mongoose adds a special getter +await Person.deleteMany({ name: 'Ian Fleming' }); + +const story = await Story.findOne({ title: 'Casino Royale' }).populate('author'); +story.author; // `null' +const storySchema = Schema({ + authors: [{ type: Schema.Types.ObjectId, ref: 'Person' }], + title: String +}); + +// Later + +const story = await Story.findOne({ title: 'Casino Royale' }).populate('authors'); +story.authors; // `[]` +const story = await Story. + findOne({ title: /casino royale/i }). + populate('author', 'name'). 
+ exec(); // only return the Persons name +// prints "The author is Ian Fleming" +console.log('The author is %s', story.author.name); +// prints "The authors age is null" +console.log('The authors age is %s', story.author.age); +await Story. + find({ /* ... */ }). + populate('fans'). + populate('author'). + exec(); + // The 2nd `populate()` call below overwrites the first because they +// both populate 'fans'. +await Story. +find(). +populate({ path: 'fans', select: 'name' }). +populate({ path: 'fans', select: 'email' }); +// The above is equivalent to: +await Story.find().populate({ path: 'fans', select: 'email' }); +await Story. + find(). + populate({ + path: 'fans', + match: { age: { $gte: 21 } }, + // Explicitly exclude `_id`, see http://bit.ly/2aEfTdB + select: 'name -_id' + }). + exec(); + const story = await Story. + findOne({ title: 'Casino Royale' }). + populate({ path: 'author', match: { name: { $ne: 'Ian Fleming' } } }). + exec(); +story.author; // `null` +const story = await Story. + findOne({ 'author.name': 'Ian Fleming' }). + populate('author'). + exec(); +story; // null +const story = await Story. + findOne({ 'author.name': 'Ian Fleming' }). + populate('author'). + exec(); +story; // null +const stories = await Story.find().populate({ + path: 'fans', + options: { limit: 2 } +}); + +stories[0].name; // 'Casino Royale' +stories[0].fans.length; // 2 + +// 2nd story has 0 fans! +stories[1].name; // 'Live and Let Die' +stories[1].fans.length; // 0 +const stories = await Story.find().populate({ + path: 'fans', + // Special option that tells Mongoose to execute a separate query + // for each `story` to make sure we get 2 fans for each story. + perDocumentLimit: 2 +}); + +stories[0].name; // 'Casino Royale' +stories[0].fans.length; // 2 + +stories[1].name; // 'Live and Let Die' +stories[1].fans.length; // 2 +await story1.save(); + +author.stories.push(story1); +await author.save(); +const person = await Person. + findOne({ name: 'Ian Fleming' }). 
+ populate('stories'). + exec(); // only works if we pushed refs to children +console.log(person); +const stories = await Story. + find({ author: author._id }). + exec(); +console.log('The stories are an array: ', stories); +const person = await Person.findOne({ name: 'Ian Fleming' }); + +person.populated('stories'); // null + +// Call the `populate()` method on a document to populate a path. +await person.populate('stories'); + +person.populated('stories'); // Array of ObjectIds +person.stories[0].name; // 'Casino Royale' +await person.populate(['stories', 'fans']); +person.populated('fans'); // Array of ObjectIds +const userSchema = new Schema({ + name: String, + friends: [{ type: ObjectId, ref: 'User' }] +}); +const userSchema = new Schema({ + name: String, + friends: [{ type: ObjectId, ref: 'User' }] +}); +const db1 = mongoose.createConnection('mongodb://127.0.0.1:27000/db1'); +const db2 = mongoose.createConnection('mongodb://127.0.0.1:27001/db2'); + +const conversationSchema = new Schema({ numMessages: Number }); +const Conversation = db2.model('Conversation', conversationSchema); + +const eventSchema = new Schema({ + name: String, + conversation: { + type: ObjectId, + ref: Conversation // `ref` is a **Model class**, not a string + } +}); +const Event = db1.model('Event', eventSchema); +// Works +const events = await Event. + find(). + populate('conversation'); + const events = await Event. + find(). + // The `model` option specifies the model to use for populating. + populate({ path: 'conversation', model: Conversation }); + const commentSchema = new Schema({ + body: { type: String, required: true }, + doc: { + type: Schema.Types.ObjectId, + required: true, + // Instead of a hardcoded model name in `ref`, `refPath` means Mongoose + // will look at the `docModel` property to find the right model. 
+ refPath: 'docModel' + }, + docModel: { + type: String, + required: true, + enum: ['BlogPost', 'Product'] + } + }); + + const Product = mongoose.model('Product', new Schema({ name: String })); + const BlogPost = mongoose.model('BlogPost', new Schema({ title: String })); + const Comment = mongoose.model('Comment', commentSchema); + const book = await Product.create({ name: 'The Count of Monte Cristo' }); +const post = await BlogPost.create({ title: 'Top 10 French Novels' }); + +const commentOnBook = await Comment.create({ + body: 'Great read', + doc: book._id, + docModel: 'Product' +}); + +const commentOnPost = await Comment.create({ + body: 'Very informative', + doc: post._id, + docModel: 'BlogPost' +}); + +// The below `populate()` works even though one comment references the +// 'Product' collection and the other references the 'BlogPost' collection. +const comments = await Comment.find().populate('doc').sort({ body: 1 }); +comments[0].doc.name; // "The Count of Monte Cristo" +comments[1].doc.title; // "Top 10 French Novels" +const commentSchema = new Schema({ + body: { type: String, required: true }, + product: { + type: Schema.Types.ObjectId, + required: true, + ref: 'Product' + }, + blogPost: { + type: Schema.Types.ObjectId, + required: true, + ref: 'BlogPost' + } +}); + +// ... + +// The below `populate()` is equivalent to the `refPath` approach, you +// just need to make sure you `populate()` both `product` and `blogPost`. +const comments = await Comment.find(). + populate('product'). + populate('blogPost'). + sort({ body: 1 }); +comments[0].product.name; // "The Count of Monte Cristo" +comments[1].blogPost.title; // "Top 10 French Novels" +const commentSchema = new Schema({ + body: { type: String, required: true }, + product: { + type: Schema.Types.ObjectId, + required: true, + ref: 'Product' + }, + blogPost: { + type: Schema.Types.ObjectId, + required: true, + ref: 'BlogPost' + } +}); + +// ... 
+ +// The below `populate()` is equivalent to the `refPath` approach, you +// just need to make sure you `populate()` both `product` and `blogPost`. +const comments = await Comment.find(). + populate('product'). + populate('blogPost'). + sort({ body: 1 }); +comments[0].product.name; // "The Count of Monte Cristo" +comments[1].blogPost.title; // "Top 10 French Novels" +const commentSchema = new Schema({ + body: { type: String, required: true }, + commentType: { + type: String, + enum: ['comment', 'review'] + }, + entityId: { + type: Schema.Types.ObjectId, + required: true, + refPath: function () { + return this.commentType === 'review' ? this.reviewEntityModel : this.commentEntityModel; // 'this' refers to the document being populated + } + }, + commentEntityModel: { + type: String, + required: true, + enum: ['BlogPost', 'Review'] + }, + reviewEntityModel: { + type: String, + required: true, + enum: ['Vendor', 'Product'] + } +}); +const commentSchema = new Schema({ + body: { type: String, required: true }, + verifiedBuyer: Boolean + doc: { + type: Schema.Types.ObjectId, + required: true, + ref: function() { + return this.verifiedBuyer ? 
'Product' : 'BlogPost'; // 'this' refers to the document being populated + } + }, +}); +const AuthorSchema = new Schema({ + name: String, + posts: [{ type: mongoose.Schema.Types.ObjectId, ref: 'BlogPost' }] +}); + +const BlogPostSchema = new Schema({ + title: String, + comments: [{ + author: { type: mongoose.Schema.Types.ObjectId, ref: 'Author' }, + content: String + }] +}); + +const Author = mongoose.model('Author', AuthorSchema, 'Author'); +const BlogPost = mongoose.model('BlogPost', BlogPostSchema, 'BlogPost'); +const AuthorSchema = new Schema({ + name: String +}); + +const BlogPostSchema = new Schema({ + title: String, + author: { type: mongoose.Schema.Types.ObjectId, ref: 'Author' }, + comments: [{ + author: { type: mongoose.Schema.Types.ObjectId, ref: 'Author' }, + content: String + }] +}); +// Specifying a virtual with a `ref` property is how you enable virtual +// population +AuthorSchema.virtual('posts', { + ref: 'BlogPost', + localField: '_id', + foreignField: 'author' +}); + +const Author = mongoose.model('Author', AuthorSchema, 'Author'); +const BlogPost = mongoose.model('BlogPost', BlogPostSchema, 'BlogPost'); +You can then populate() the author's posts as shown below. + +const author = await Author.findOne().populate('posts'); + +author.posts[0].title; // Title of the first blog post +const authorSchema = new Schema({ name: String }, { + toJSON: { virtuals: true }, // So `res.json()` and other `JSON.stringify()` functions include virtuals + toObject: { virtuals: true } // So `console.log()` and other functions that use `toObject()` include virtuals +}); +let authors = await Author. + find({}). + // Won't work because the foreign field `author` is not selected + populate({ path: 'posts', select: 'title' }). + exec(); + +authors = await Author. + find({}). + // Works, foreign field `author` is selected + populate({ path: 'posts', select: 'title author' }). 
+ exec(); + const PersonSchema = new Schema({ + name: String, + band: String + }); + + const BandSchema = new Schema({ + name: String + }); + BandSchema.virtual('numMembers', { + ref: 'Person', // The model to use + localField: 'name', // Find people where `localField` + foreignField: 'band', // is equal to `foreignField` + count: true // And only get the number of docs + }); + + // Later + const doc = await Band.findOne({ name: 'Motley Crue' }). + populate('numMembers'); + doc.numMembers; // 2 + // Same example as 'Populate Virtuals' section +AuthorSchema.virtual('posts', { + ref: 'BlogPost', + localField: '_id', + foreignField: 'author', + match: { archived: false } // match option with basic query selector +}); + +const Author = mongoose.model('Author', AuthorSchema, 'Author'); +const BlogPost = mongoose.model('BlogPost', BlogPostSchema, 'BlogPost'); + +// After population +const author = await Author.findOne().populate('posts'); + +author.posts; // Array of not `archived` posts +AuthorSchema.virtual('posts', { + ref: 'BlogPost', + localField: '_id', + foreignField: 'author', + // Add an additional filter `{ tags: author.favoriteTags }` to the populate query + // Mongoose calls the `match` function with the document being populated as the + // first argument. + match: author => ({ tags: author.favoriteTags }) +}); +// Overwrite the `match` option specified in `AuthorSchema.virtual()` for this +// single `populate()` call. 
+await Author.findOne().populate({ path: posts, match: {} }); +await Author.findOne().populate({ + path: posts, + // Add `isDeleted: false` to the virtual's default `match`, so the `match` + // option would be `{ tags: author.favoriteTags, isDeleted: false }` + match: (author, virtual) => ({ + ...virtual.options.match(author), + isDeleted: false + }) +}); +const BandSchema = new Schema({ + name: String, + members: { + type: Map, + of: { + type: 'ObjectId', + ref: 'Person' + } + } +}); +const Band = mongoose.model('Band', bandSchema); +const person1 = new Person({ name: 'Vince Neil' }); +const person2 = new Person({ name: 'Mick Mars' }); + +const band = new Band({ + name: 'Motley Crue', + members: { + singer: person1._id, + guitarist: person2._id + } +}); +const band = await Band.findOne({ name: 'Motley Crue' }).populate('members.$*'); + +band.members.get('singer'); // { _id: ..., name: 'Vince Neil' } +const librarySchema = new Schema({ + name: String, + books: { + type: Map, + of: new Schema({ + title: String, + author: { + type: 'ObjectId', + ref: 'Person' + } + }) + } +}); +const Library = mongoose.model('Library', librarySchema); +const libraries = await Library.find().populate('books.$*.author'); +// Always attach `populate()` to `find()` calls +MySchema.pre('find', function() { + this.populate('user'); +}); +// Always `populate()` after `find()` calls. Useful if you want to selectively populate +// based on the docs found. +MySchema.post('find', async function(docs) { + for (const doc of docs) { + if (doc.isPublic) { + await doc.populate('user'); + } + } +}); +// `populate()` after saving. 
Useful for sending populated data back to the client in an +// update API endpoint +MySchema.post('save', function(doc, next) { + doc.populate('user').then(function() { + next(); + }); +}); +const userSchema = new Schema({ + email: String, + password: String, + followers: [{ type: mongoose.Schema.Types.ObjectId, ref: 'User' }], + following: [{ type: mongoose.Schema.Types.ObjectId, ref: 'User' }] +}); + +userSchema.pre('find', function(next) { + this.populate('followers following'); + next(); +}); + +const User = mongoose.model('User', userSchema); +userSchema.pre('find', function(next) { + if (this.options._recursed) { + return next(); + } + this.populate({ path: 'followers following', options: { _recursed: true } }); + next(); +}); +// With `transform` +doc = await Parent.findById(doc).populate([ + { + path: 'child', + // If `doc` is null, use the original id instead + transform: (doc, id) => doc == null ? id : doc + } +]); + +doc.child; // 634d1a5744efe65ae09142f9 +doc.children; // [ 634d1a67ac15090a0ca6c0ea, { _id: 634d1a4ddb804d17d95d1c7f, name: 'Luke', __v: 0 } ] +let doc = await Parent.create({ children: [{ name: 'Luke' }, { name: 'Leia' }] }); + +doc = await Parent.findById(doc).populate([{ + path: 'children', + transform: doc => doc == null ? null : doc.name +}]); + +doc.children; // ['Luke', 'Leia'] +const internationalizedStringSchema = new Schema({ + en: String, + es: String +}); + +const ingredientSchema = new Schema({ + // Instead of setting `name` to just a string, set `name` to a map + // of language codes to strings. 
+ name: { + type: internationalizedStringSchema, + // When you access `name`, pull the document's locale + get: function(value) { + return value[this.$locals.language || 'en']; + } + } +}); + +const recipeSchema = new Schema({ + ingredients: [{ type: mongoose.ObjectId, ref: 'Ingredient' }] +}); + +const Ingredient = mongoose.model('Ingredient', ingredientSchema); +const Recipe = mongoose.model('Recipe', recipeSchema); +// Create some sample data +const { _id } = await Ingredient.create({ + name: { + en: 'Eggs', + es: 'Huevos' + } +}); +await Recipe.create({ ingredients: [_id] }); + +// Populate with setting `$locals.language` for internationalization +const language = 'es'; +const recipes = await Recipe.find().populate({ + path: 'ingredients', + transform: function(doc) { + doc.$locals.language = language; + return doc; + } +}); + +// Gets the ingredient's name in Spanish `name.es` +recipes[0].ingredients[0].name; // 'Huevos' +const options = { discriminatorKey: 'kind' }; + +const eventSchema = new mongoose.Schema({ time: Date }, options); +const Event = mongoose.model('Event', eventSchema); + +// ClickedLinkEvent is a special type of Event that has +// a URL. +const ClickedLinkEvent = Event.discriminator('ClickedLink', + new mongoose.Schema({ url: String }, options)); + +// When you create a generic event, it can't have a URL field... 
+const genericEvent = new Event({ time: Date.now(), url: 'google.com' }); +assert.ok(!genericEvent.url); + +// But a ClickedLinkEvent can +const clickedEvent = new ClickedLinkEvent({ time: Date.now(), url: 'google.com' }); +assert.ok(clickedEvent.url); +const event1 = new Event({ time: Date.now() }); +const event2 = new ClickedLinkEvent({ time: Date.now(), url: 'google.com' }); +const event3 = new SignedUpEvent({ time: Date.now(), user: 'testuser' }); + + +await Promise.all([event1.save(), event2.save(), event3.save()]); +const count = await Event.countDocuments(); +assert.equal(count, 3); +const event1 = new Event({ time: Date.now() }); +const event2 = new ClickedLinkEvent({ time: Date.now(), url: 'google.com' }); +const event3 = new SignedUpEvent({ time: Date.now(), user: 'testuser' }); + +assert.ok(!event1.__t); +assert.equal(event2.__t, 'ClickedLink'); +assert.equal(event3.__t, 'SignedUp'); +let event = new ClickedLinkEvent({ time: Date.now(), url: 'google.com' }); +await event.save(); + +event.__t = 'SignedUp'; +// ValidationError: ClickedLink validation failed: __t: Cast to String failed for value "SignedUp" (type string) at path "__t" + await event.save(); + +event = await ClickedLinkEvent.findByIdAndUpdate(event._id, { __t: 'SignedUp' }, { new: true }); +event.__t; // 'ClickedLink', update was a no-op +let event = new ClickedLinkEvent({ time: Date.now(), url: 'google.com' }); +await event.save(); + +event = await ClickedLinkEvent.findByIdAndUpdate( + event._id, + { __t: 'SignedUp' }, + { overwriteDiscriminatorKey: true, new: true } +); +event.__t; // 'SignedUp', updated discriminator key +const eventSchema = new Schema({ message: String }, + { discriminatorKey: 'kind', _id: false }); + +const batchSchema = new Schema({ events: [eventSchema] }); + +// `batchSchema.path('events')` gets the mongoose `DocumentArray` +// For TypeScript, use `schema.path('events')` +const docArray = batchSchema.path('events'); + +// The `events` array can contain 2 different 
types of events, a +// 'clicked' event that requires an element id that was clicked... +const clickedSchema = new Schema({ + element: { + type: String, + required: true + } +}, { _id: false }); +// Make sure to attach any hooks to `eventSchema` and `clickedSchema` +// **before** calling `discriminator()`. +const Clicked = docArray.discriminator('Clicked', clickedSchema); + +// ... and a 'purchased' event that requires the product that was purchased. +const Purchased = docArray.discriminator('Purchased', new Schema({ + product: { + type: String, + required: true + } +}, { _id: false })); + +const Batch = db.model('EventBatch', batchSchema); + +// Create a new batch of events with different kinds +const doc = await Batch.create({ + events: [ + { kind: 'Clicked', element: '#hero', message: 'hello' }, + { kind: 'Purchased', product: 'action-figure-1', message: 'world' } + ] +}); + +assert.equal(doc.events.length, 2); + +assert.equal(doc.events[0].element, '#hero'); +assert.equal(doc.events[0].message, 'hello'); +assert.ok(doc.events[0] instanceof Clicked); + +assert.equal(doc.events[1].product, 'action-figure-1'); +assert.equal(doc.events[1].message, 'world'); +assert.ok(doc.events[1] instanceof Purchased); + +doc.events.push({ kind: 'Purchased', product: 'action-figure-2' }); + +await doc.save(); + +assert.equal(doc.events.length, 3); + +assert.equal(doc.events[2].product, 'action-figure-2'); +assert.ok(doc.events[2] instanceof Purchased); +const shapeSchema = Schema({ name: String }, { discriminatorKey: 'kind' }); +const schema = Schema({ shape: shapeSchema }); + +// For TypeScript, use `schema.path('shape').discriminator(...)` +schema.path('shape').discriminator('Circle', Schema({ radius: String })); +schema.path('shape').discriminator('Square', Schema({ side: Number })); + +const MyModel = mongoose.model('ShapeTest', schema); + +// If `kind` is set to 'Circle', then `shape` will have a `radius` property +let doc = new MyModel({ shape: { kind: 'Circle', radius: 5 } 
}); +doc.shape.radius; // 5 + +// If `kind` is set to 'Square', then `shape` will have a `side` property +doc = new MyModel({ shape: { kind: 'Square', side: 10 } }); +doc.shape.side; // 10 +// loadedAt.js +module.exports = function loadedAtPlugin(schema, options) { + schema.virtual('loadedAt'). + get(function() { return this._loadedAt; }). + set(function(v) { this._loadedAt = v; }); + + schema.post(['find', 'findOne'], function(docs) { + if (!Array.isArray(docs)) { + docs = [docs]; + } + const now = new Date(); + for (const doc of docs) { + doc.loadedAt = now; + } + }); +}; + +// game-schema.js +const loadedAtPlugin = require('./loadedAt'); +const gameSchema = new Schema({ /* ... */ }); +gameSchema.plugin(loadedAtPlugin); + +// player-schema.js +const loadedAtPlugin = require('./loadedAt'); +const playerSchema = new Schema({ /* ... */ }); +playerSchema.plugin(loadedAtPlugin); +const mongoose = require('mongoose'); +mongoose.plugin(require('./loadedAt')); + +const gameSchema = new Schema({ /* ... */ }); +const playerSchema = new Schema({ /* ... */ }); +// `loadedAtPlugin` gets attached to both schemas +const Game = mongoose.model('Game', gameSchema); +const Player = mongoose.model('Player', playerSchema); +// loadedAt.js +module.exports = function loadedAtPlugin(schema, options) { + schema.virtual('loadedAt'). + get(function() { return this._loadedAt; }). + set(function(v) { this._loadedAt = v; }); + + schema.post(['find', 'findOne'], function(docs) { + if (!Array.isArray(docs)) { + docs = [docs]; + } + const now = new Date(); + for (const doc of docs) { + doc.loadedAt = now; + } + }); +}; + +// game-schema.js +const loadedAtPlugin = require('./loadedAt'); +const gameSchema = new Schema({ /* ... */ }); +const Game = mongoose.model('Game', gameSchema); + +// `find()` and `findOne()` hooks from `loadedAtPlugin()` won't get applied +// because `mongoose.model()` was already called! 
+gameSchema.plugin(loadedAtPlugin); +const userSchema = new Schema({ name: String }, { timestamps: true }); +const User = mongoose.model('User', userSchema); + +let doc = await User.create({ name: 'test' }); + +console.log(doc.createdAt); // 2022-02-26T16:37:48.244Z +console.log(doc.updatedAt); // 2022-02-26T16:37:48.244Z + +doc.name = 'test2'; +await doc.save(); +console.log(doc.createdAt); // 2022-02-26T16:37:48.244Z +console.log(doc.updatedAt); // 2022-02-26T16:37:48.307Z + +doc = await User.findOneAndUpdate({ _id: doc._id }, { name: 'test3' }, { new: true }); +console.log(doc.createdAt); // 2022-02-26T16:37:48.244Z +console.log(doc.updatedAt); // 2022-02-26T16:37:48.366Z +let doc = await User.create({ name: 'test' }); + +console.log(doc.createdAt); // 2022-02-26T17:08:13.930Z +console.log(doc.updatedAt); // 2022-02-26T17:08:13.930Z + +doc.name = 'test2'; +doc.createdAt = new Date(0); +doc.updatedAt = new Date(0); +await doc.save(); + +// Mongoose blocked changing `createdAt` and set its own `updatedAt`, ignoring +// the attempt to manually set them. +console.log(doc.createdAt); // 2022-02-26T17:08:13.930Z +console.log(doc.updatedAt); // 2022-02-26T17:08:13.991Z + +// Mongoose also blocks changing `createdAt` and sets its own `updatedAt` +// on `findOneAndUpdate()`, `updateMany()`, and other query operations +// **except** `replaceOne()` and `findOneAndReplace()`. +doc = await User.findOneAndUpdate( + { _id: doc._id }, + { name: 'test3', createdAt: new Date(0), updatedAt: new Date(0) }, + { new: true } +); +console.log(doc.createdAt); // 2022-02-26T17:08:13.930Z +console.log(doc.updatedAt); // 2022-02-26T17:08:14.008Z +// `findOneAndReplace()` and `replaceOne()` without timestamps specified in `replacement` +// sets `createdAt` and `updatedAt` to current time. 
+doc = await User.findOneAndReplace( + { _id: doc._id }, + { name: 'test3' }, + { new: true } +); +console.log(doc.createdAt); // 2022-02-26T17:08:14.008Z +console.log(doc.updatedAt); // 2022-02-26T17:08:14.008Z + +// `findOneAndReplace()` and `replaceOne()` with timestamps specified in `replacement` +// sets `createdAt` and `updatedAt` to the values in `replacement`. +doc = await User.findOneAndReplace( + { _id: doc._id }, + { + name: 'test3', + createdAt: new Date('2022-06-01'), + updatedAt: new Date('2022-06-01') + }, + { new: true } +); +console.log(doc.createdAt); // 2022-06-01T00:00:00.000Z +console.log(doc.updatedAt); // 2022-06-01T00:00:00.000Z +const userSchema = new Schema({ name: String }, { + timestamps: { + createdAt: 'created_at', // Use `created_at` to store the created date + updatedAt: 'updated_at' // and `updated_at` to store the last updated date + } +}); +let doc = await User.create({ name: 'test' }); + +console.log(doc.createdAt); // 2022-02-26T23:28:54.264Z +console.log(doc.updatedAt); // 2022-02-26T23:28:54.264Z + +doc.name = 'test2'; + +// Setting `timestamps: false` tells Mongoose to skip updating `updatedAt` on this `save()` +await doc.save({ timestamps: false }); +console.log(doc.updatedAt); // 2022-02-26T23:28:54.264Z + +// Similarly, setting `timestamps: false` on a query tells Mongoose to skip updating +// `updatedAt`. +doc = await User.findOneAndUpdate({ _id: doc._id }, { name: 'test3' }, { + new: true, + timestamps: false +}); +console.log(doc.updatedAt); // 2022-02-26T23:28:54.264Z + +// Below is how you can disable timestamps on a `bulkWrite()` +await User.bulkWrite([{ + updateOne: { + filter: { _id: doc._id }, + update: { name: 'test4' }, + timestamps: false + } +}]); +doc = await User.findOne({ _id: doc._id }); +console.log(doc.updatedAt); // 2022-02-26T23:28:54.264Z +const doc = new User({ name: 'test' }); + +// Tell Mongoose to set `createdAt`, but skip `updatedAt`. 
+await doc.save({ timestamps: { createdAt: true, updatedAt: false } }); +console.log(doc.createdAt); // 2022-02-26T23:32:12.478Z +console.log(doc.updatedAt); // undefined +let doc = await User.create({ name: 'test' }); + +// To update `updatedAt`, do a `findOneAndUpdate()` with `timestamps: false` and +// `updatedAt` set to the value you want +doc = await User.findOneAndUpdate({ _id: doc._id }, { updatedAt: new Date(0) }, { + new: true, + timestamps: false +}); +console.log(doc.updatedAt); // 1970-01-01T00:00:00.000Z + +// To update `createdAt`, you also need to set `strict: false` because `createdAt` +// is immutable +doc = await User.findOneAndUpdate({ _id: doc._id }, { createdAt: new Date(0) }, { + new: true, + timestamps: false, + strict: false +}); +console.log(doc.createdAt); // 1970-01-01T00:00:00.000Z +const roleSchema = new Schema({ value: String }, { timestamps: true }); +const userSchema = new Schema({ name: String, roles: [roleSchema] }); + +const doc = await User.create({ name: 'test', roles: [{ value: 'admin' }] }); +console.log(doc.roles[0].createdAt); // 2022-02-27T00:22:53.836Z +console.log(doc.roles[0].updatedAt); // 2022-02-27T00:22:53.836Z + +// Overwriting the subdocument also overwrites `createdAt` and `updatedAt` +doc.roles[0] = { value: 'root' }; +await doc.save(); +console.log(doc.roles[0].createdAt); // 2022-02-27T00:22:53.902Z +console.log(doc.roles[0].updatedAt); // 2022-02-27T00:22:53.902Z + +// But updating the subdocument preserves `createdAt` and updates `updatedAt` +doc.roles[0].value = 'admin'; +await doc.save(); +console.log(doc.roles[0].createdAt); // 2022-02-27T00:22:53.902Z +console.log(doc.roles[0].updatedAt); // 2022-02-27T00:22:53.909Z +mongoose.set('debug', true); + +const userSchema = new Schema({ + name: String +}, { timestamps: true }); +const User = mongoose.model('User', userSchema); + +await User.findOneAndUpdate({}, { name: 'test' }); +await User.findOneAndUpdate({}, { $setOnInsert: { updatedAt: new Date() } }, { + 
timestamps: { createdAt: true, updatedAt: false } +}); +const createdAt = new Date('2011-06-01'); +// Update a document's `createdAt` to a custom value. +// Normally Mongoose would prevent doing this because `createdAt` is immutable. +await Model.updateOne({ _id: doc._id }, { createdAt }, { overwriteImmutable: true, timestamps: false }); + +doc = await Model.collection.findOne({ _id: doc._id }); +doc.createdAt.valueOf() === createdAt.valueOf(); // true +// Using Mongoose's default connection +const session = await mongoose.startSession(); + +// Using custom connection +const db = await mongoose.createConnection(mongodbUri).asPromise(); +const session = await db.startSession(); +let session = null; +return Customer.createCollection(). + then(() => Customer.startSession()). + // The `withTransaction()` function's first parameter is a function + // that returns a promise. + then(_session => { + session = _session; + return session.withTransaction(() => { + return Customer.create([{ name: 'Test' }], { session: session }); + }); + }). + then(() => Customer.countDocuments()). + then(count => assert.strictEqual(count, 1)). + then(() => session.endSession()); + const doc = new Person({ name: 'Will Riker' }); + +await db.transaction(async function setRank(session) { + doc.name = 'Captain'; + await doc.save({ session }); + doc.isNew; // false + + // Throw an error to abort the transaction + throw new Error('Oops!'); +}, { readPreference: 'primary' }).catch(() => {}); + +// true, `transaction()` reset the document's state because the +// transaction was aborted. +doc.isNew; +const User = db.model('User', new Schema({ name: String })); + +let session = null; +return User.createCollection(). + then(() => db.startSession()). + then(_session => { + session = _session; + return User.create({ name: 'foo' }); + }). + then(() => { + session.startTransaction(); + return User.findOne({ name: 'foo' }).session(session); + }). 
+ then(user => { + // Getter/setter for the session associated with this document. + assert.ok(user.$session()); + user.name = 'bar'; + // By default, `save()` uses the associated session + return user.save(); + }). + then(() => User.findOne({ name: 'bar' })). + // Won't find the doc because `save()` is part of an uncommitted transaction + then(doc => assert.ok(!doc)). + then(() => session.commitTransaction()). + then(() => session.endSession()). + then(() => User.findOne({ name: 'bar' })). + then(doc => assert.ok(doc)); + const Event = db.model('Event', new Schema({ createdAt: Date }), 'Event'); + +let session = null; +return Event.createCollection(). + then(() => db.startSession()). + then(_session => { + session = _session; + session.startTransaction(); + return Event.insertMany([ + { createdAt: new Date('2018-06-01') }, + { createdAt: new Date('2018-06-02') }, + { createdAt: new Date('2017-06-01') }, + { createdAt: new Date('2017-05-31') } + ], { session: session }); + }). + then(() => Event.aggregate([ + { + $group: { + _id: { + month: { $month: '$createdAt' }, + year: { $year: '$createdAt' } + }, + count: { $sum: 1 } + } + }, + { $sort: { count: -1, '_id.year': -1, '_id.month': -1 } } + ]).session(session)). + then(res => assert.deepEqual(res, [ + { _id: { month: 6, year: 2018 }, count: 2 }, + { _id: { month: 6, year: 2017 }, count: 1 }, + { _id: { month: 5, year: 2017 }, count: 1 } + ])). + then(() => session.commitTransaction()). 
+ then(() => session.endSession()); + mongoose.set('transactionAsyncLocalStorage', true); + +const Test = mongoose.model('Test', mongoose.Schema({ name: String })); + +const doc = new Test({ name: 'test' }); + +// Save a new doc in a transaction that aborts +await connection.transaction(async() => { + await doc.save(); // Notice no session here + throw new Error('Oops'); +}).catch(() => {}); + +// false, `save()` was rolled back +await Test.exists({ _id: doc._id }); +const Customer = db.model('Customer', new Schema({ name: String })); + +let session = null; +return Customer.createCollection(). + then(() => db.startSession()). + then(_session => { + session = _session; + // Start a transaction + session.startTransaction(); + // This `create()` is part of the transaction because of the `session` + // option. + return Customer.create([{ name: 'Test' }], { session: session }); + }). + // Transactions execute in isolation, so unless you pass a `session` + // to `findOne()` you won't see the document until the transaction + // is committed. + then(() => Customer.findOne({ name: 'Test' })). + then(doc => assert.ok(!doc)). + // This `findOne()` will return the doc, because passing the `session` + // means this `findOne()` will run as part of the transaction. + then(() => Customer.findOne({ name: 'Test' }).session(session)). + then(doc => assert.ok(doc)). + // Once the transaction is committed, the write operation becomes + // visible outside of the transaction. + then(() => session.commitTransaction()). + then(() => Customer.findOne({ name: 'Test' })). + then(doc => assert.ok(doc)). + then(() => session.endSession()); + let session = null; +return Customer.createCollection(). + then(() => Customer.startSession()). + then(_session => { + session = _session; + session.startTransaction(); + return Customer.create([{ name: 'Test' }], { session: session }); + }). + then(() => Customer.create([{ name: 'Test2' }], { session: session })). + then(() => session.abortTransaction()). 
+ then(() => Customer.countDocuments()). + then(count => assert.strictEqual(count, 0)). + then(() => session.endSession()); \ No newline at end of file diff --git a/backend/package.json b/backend/package.json new file mode 100644 index 00000000..430fab9a --- /dev/null +++ b/backend/package.json @@ -0,0 +1,41 @@ +{ + "name": "movies", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "@fontsource/roboto": "^5.2.6", + "@mui/icons-material": "^7.2.0", + "@mui/material": "^7.2.0", + "@mui/styled-engine-sc": "^7.2.0", + "dotenv": "^16.5.0", + "express": "^5.1.0", + "global": "^4.4.0", + "hooks": "^0.3.2", + "react": "^19.1.0", + "react-dom": "^19.1.0", + "react-router": "^7.6.0", + "react-router-dom": "^7.6.0", + "router": "^2.2.0", + "style": "^0.0.3", + "styled-components": "^6.1.19", + "test": "^3.3.0" + }, + "devDependencies": { + "@eslint/js": "^9.21.0", + "@types/react": "^19.0.10", + "@types/react-dom": "^19.0.4", + "@vitejs/plugin-react": "^4.3.4", + "eslint": "^9.21.0", + "eslint-plugin-react-hooks": "^5.1.0", + "eslint-plugin-react-refresh": "^0.4.19", + "globals": "^15.15.0", + "vite": "^6.3.5" + } +} diff --git a/backend/server.js b/backend/server.js new file mode 100644 index 00000000..75e355d9 --- /dev/null +++ b/backend/server.js @@ -0,0 +1,39 @@ +import express from "express"; +import cors from "cors"; +import mongoose from "mongoose"; +import bcrypt from "bcrypt-nodejs"; +import bodyParser from "body-parser"; +import crypto from "crypto"; + +const mongoUrl = process.env.MONGO_URL || "mongodb://localhost/auth"; +mongoose.connect(mongoUrl, { useNewUrlParser: true, useUnifiedTopology: true }); +mongoose.Promise = Promise; + +const User = mongoose.model("User", { + name: { + type: String, + unique: true, + }, + email: { + type: String, + unique: true, + }, + password: { + type: String, + required: true, + }, + 
accessToken: { + type: String, + default: () => crypto.randomBytes(128).toString("hex"), + }, +}); + +const authenticateUser = async (req, res, next) => { + const user = await User.findOne({ accessToken: req.header("Authorization") }); + if (user) { + req.user = user; + next(); + } else { + res.status(401).json({ loggedOut: true }); + } +}; diff --git a/backend/settings.json b/backend/settings.json new file mode 100644 index 00000000..1ee97b0a --- /dev/null +++ b/backend/settings.json @@ -0,0 +1 @@ +"github.gitAuthentication": true diff --git a/coverage/clover.xml b/coverage/clover.xml new file mode 100644 index 00000000..5285c1db --- /dev/null +++ b/coverage/clover.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/coverage/coverage-final.json b/coverage/coverage-final.json new file mode 100644 index 00000000..40ee531e --- /dev/null +++ b/coverage/coverage-final.json @@ -0,0 +1,2 @@ +{"/home/user/js-project-movies/sum.js": {"path":"/home/user/js-project-movies/sum.js","all":false,"statementMap":{"0":{"start":{"line":1,"column":0},"end":{"line":1,"column":20}},"1":{"start":{"line":2,"column":0},"end":{"line":2,"column":17}},"2":{"start":{"line":3,"column":0},"end":{"line":3,"column":3}},"3":{"start":{"line":4,"column":0},"end":{"line":4,"column":23}}},"s":{"0":1,"1":1,"2":1,"3":1},"branchMap":{"0":{"type":"branch","line":1,"loc":{"start":{"line":1,"column":0},"end":{"line":3,"column":3}},"locations":[{"start":{"line":1,"column":0},"end":{"line":3,"column":3}}]}},"b":{"0":[1]},"fnMap":{"0":{"name":"sum","decl":{"start":{"line":1,"column":0},"end":{"line":3,"column":3}},"loc":{"start":{"line":1,"column":0},"end":{"line":3,"column":3}},"line":1}},"f":{"0":1}} +} diff --git a/coverage/lcov-report/base.css b/coverage/lcov-report/base.css new file mode 100644 index 00000000..f418035b --- /dev/null +++ b/coverage/lcov-report/base.css @@ -0,0 +1,224 @@ +body, html { + margin:0; padding: 0; + height: 100%; +} +body { + font-family: Helvetica Neue, Helvetica, Arial; 
+ font-size: 14px; + color:#333; +} +.small { font-size: 12px; } +*, *:after, *:before { + -webkit-box-sizing:border-box; + -moz-box-sizing:border-box; + box-sizing:border-box; + } +h1 { font-size: 20px; margin: 0;} +h2 { font-size: 14px; } +pre { + font: 12px/1.4 Consolas, "Liberation Mono", Menlo, Courier, monospace; + margin: 0; + padding: 0; + -moz-tab-size: 2; + -o-tab-size: 2; + tab-size: 2; +} +a { color:#0074D9; text-decoration:none; } +a:hover { text-decoration:underline; } +.strong { font-weight: bold; } +.space-top1 { padding: 10px 0 0 0; } +.pad2y { padding: 20px 0; } +.pad1y { padding: 10px 0; } +.pad2x { padding: 0 20px; } +.pad2 { padding: 20px; } +.pad1 { padding: 10px; } +.space-left2 { padding-left:55px; } +.space-right2 { padding-right:20px; } +.center { text-align:center; } +.clearfix { display:block; } +.clearfix:after { + content:''; + display:block; + height:0; + clear:both; + visibility:hidden; + } +.fl { float: left; } +@media only screen and (max-width:640px) { + .col3 { width:100%; max-width:100%; } + .hide-mobile { display:none!important; } +} + +.quiet { + color: #7f7f7f; + color: rgba(0,0,0,0.5); +} +.quiet a { opacity: 0.7; } + +.fraction { + font-family: Consolas, 'Liberation Mono', Menlo, Courier, monospace; + font-size: 10px; + color: #555; + background: #E8E8E8; + padding: 4px 5px; + border-radius: 3px; + vertical-align: middle; +} + +div.path a:link, div.path a:visited { color: #333; } +table.coverage { + border-collapse: collapse; + margin: 10px 0 0 0; + padding: 0; +} + +table.coverage td { + margin: 0; + padding: 0; + vertical-align: top; +} +table.coverage td.line-count { + text-align: right; + padding: 0 5px 0 20px; +} +table.coverage td.line-coverage { + text-align: right; + padding-right: 10px; + min-width:20px; +} + +table.coverage td span.cline-any { + display: inline-block; + padding: 0 5px; + width: 100%; +} +.missing-if-branch { + display: inline-block; + margin-right: 5px; + border-radius: 3px; + position: relative; 
+ padding: 0 4px; + background: #333; + color: yellow; +} + +.skip-if-branch { + display: none; + margin-right: 10px; + position: relative; + padding: 0 4px; + background: #ccc; + color: white; +} +.missing-if-branch .typ, .skip-if-branch .typ { + color: inherit !important; +} +.coverage-summary { + border-collapse: collapse; + width: 100%; +} +.coverage-summary tr { border-bottom: 1px solid #bbb; } +.keyline-all { border: 1px solid #ddd; } +.coverage-summary td, .coverage-summary th { padding: 10px; } +.coverage-summary tbody { border: 1px solid #bbb; } +.coverage-summary td { border-right: 1px solid #bbb; } +.coverage-summary td:last-child { border-right: none; } +.coverage-summary th { + text-align: left; + font-weight: normal; + white-space: nowrap; +} +.coverage-summary th.file { border-right: none !important; } +.coverage-summary th.pct { } +.coverage-summary th.pic, +.coverage-summary th.abs, +.coverage-summary td.pct, +.coverage-summary td.abs { text-align: right; } +.coverage-summary td.file { white-space: nowrap; } +.coverage-summary td.pic { min-width: 120px !important; } +.coverage-summary tfoot td { } + +.coverage-summary .sorter { + height: 10px; + width: 7px; + display: inline-block; + margin-left: 0.5em; + background: url(sort-arrow-sprite.png) no-repeat scroll 0 0 transparent; +} +.coverage-summary .sorted .sorter { + background-position: 0 -20px; +} +.coverage-summary .sorted-desc .sorter { + background-position: 0 -10px; +} +.status-line { height: 10px; } +/* yellow */ +.cbranch-no { background: yellow !important; color: #111; } +/* dark red */ +.red.solid, .status-line.low, .low .cover-fill { background:#C21F39 } +.low .chart { border:1px solid #C21F39 } +.highlighted, +.highlighted .cstat-no, .highlighted .fstat-no, .highlighted .cbranch-no{ + background: #C21F39 !important; +} +/* medium red */ +.cstat-no, .fstat-no, .cbranch-no, .cbranch-no { background:#F6C6CE } +/* light red */ +.low, .cline-no { background:#FCE1E5 } +/* light green */ 
+.high, .cline-yes { background:rgb(230,245,208) } +/* medium green */ +.cstat-yes { background:rgb(161,215,106) } +/* dark green */ +.status-line.high, .high .cover-fill { background:rgb(77,146,33) } +.high .chart { border:1px solid rgb(77,146,33) } +/* dark yellow (gold) */ +.status-line.medium, .medium .cover-fill { background: #f9cd0b; } +.medium .chart { border:1px solid #f9cd0b; } +/* light yellow */ +.medium { background: #fff4c2; } + +.cstat-skip { background: #ddd; color: #111; } +.fstat-skip { background: #ddd; color: #111 !important; } +.cbranch-skip { background: #ddd !important; color: #111; } + +span.cline-neutral { background: #eaeaea; } + +.coverage-summary td.empty { + opacity: .5; + padding-top: 4px; + padding-bottom: 4px; + line-height: 1; + color: #888; +} + +.cover-fill, .cover-empty { + display:inline-block; + height: 12px; +} +.chart { + line-height: 0; +} +.cover-empty { + background: white; +} +.cover-full { + border-right: none !important; +} +pre.prettyprint { + border: none !important; + padding: 0 !important; + margin: 0 !important; +} +.com { color: #999 !important; } +.ignore-none { color: #999; font-weight: normal; } + +.wrapper { + min-height: 100%; + height: auto !important; + height: 100%; + margin: 0 auto -48px; +} +.footer, .push { + height: 48px; +} diff --git a/coverage/lcov-report/block-navigation.js b/coverage/lcov-report/block-navigation.js new file mode 100644 index 00000000..530d1ed2 --- /dev/null +++ b/coverage/lcov-report/block-navigation.js @@ -0,0 +1,87 @@ +/* eslint-disable */ +var jumpToCode = (function init() { + // Classes of code we would like to highlight in the file view + var missingCoverageClasses = ['.cbranch-no', '.cstat-no', '.fstat-no']; + + // Elements to highlight in the file listing view + var fileListingElements = ['td.pct.low']; + + // We don't want to select elements that are direct descendants of another match + var notSelector = ':not(' + missingCoverageClasses.join('):not(') + ') > '; // becomes 
`:not(a):not(b) > ` + + // Selector that finds elements on the page to which we can jump + var selector = + fileListingElements.join(', ') + + ', ' + + notSelector + + missingCoverageClasses.join(', ' + notSelector); // becomes `:not(a):not(b) > a, :not(a):not(b) > b` + + // The NodeList of matching elements + var missingCoverageElements = document.querySelectorAll(selector); + + var currentIndex; + + function toggleClass(index) { + missingCoverageElements + .item(currentIndex) + .classList.remove('highlighted'); + missingCoverageElements.item(index).classList.add('highlighted'); + } + + function makeCurrent(index) { + toggleClass(index); + currentIndex = index; + missingCoverageElements.item(index).scrollIntoView({ + behavior: 'smooth', + block: 'center', + inline: 'center' + }); + } + + function goToPrevious() { + var nextIndex = 0; + if (typeof currentIndex !== 'number' || currentIndex === 0) { + nextIndex = missingCoverageElements.length - 1; + } else if (missingCoverageElements.length > 1) { + nextIndex = currentIndex - 1; + } + + makeCurrent(nextIndex); + } + + function goToNext() { + var nextIndex = 0; + + if ( + typeof currentIndex === 'number' && + currentIndex < missingCoverageElements.length - 1 + ) { + nextIndex = currentIndex + 1; + } + + makeCurrent(nextIndex); + } + + return function jump(event) { + if ( + document.getElementById('fileSearch') === document.activeElement && + document.activeElement != null + ) { + // if we're currently focused on the search input, we don't want to navigate + return; + } + + switch (event.which) { + case 78: // n + case 74: // j + goToNext(); + break; + case 66: // b + case 75: // k + case 80: // p + goToPrevious(); + break; + } + }; +})(); +window.addEventListener('keydown', jumpToCode); diff --git a/coverage/lcov-report/favicon.png b/coverage/lcov-report/favicon.png new file mode 100644 index 00000000..c1525b81 Binary files /dev/null and b/coverage/lcov-report/favicon.png differ diff --git 
a/coverage/lcov-report/index.html b/coverage/lcov-report/index.html new file mode 100644 index 00000000..f426a03a --- /dev/null +++ b/coverage/lcov-report/index.html @@ -0,0 +1,116 @@ + + + + + + Code coverage report for All files + + + + + + + + + +
+
+

All files

+
+ +
+ 100% + Statements + 4/4 +
+ + +
+ 100% + Branches + 1/1 +
+ + +
+ 100% + Functions + 1/1 +
+ + +
+ 100% + Lines + 4/4 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
sum.js +
+
100%4/4100%1/1100%1/1100%4/4
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/coverage/lcov-report/prettify.css b/coverage/lcov-report/prettify.css new file mode 100644 index 00000000..b317a7cd --- /dev/null +++ b/coverage/lcov-report/prettify.css @@ -0,0 +1 @@ +.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} diff --git a/coverage/lcov-report/prettify.js b/coverage/lcov-report/prettify.js new file mode 100644 index 00000000..b3225238 --- /dev/null +++ b/coverage/lcov-report/prettify.js @@ -0,0 +1,2 @@ +/* eslint-disable */ +window.PR_SHOULD_USE_CONTINUATION=true;(function(){var h=["break,continue,do,else,for,if,return,while"];var u=[h,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"];var p=[u,"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"];var l=[p,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"];var x=[p,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"];var 
R=[x,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"];var r="all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes";var w=[p,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"];var s="caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END";var I=[h,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"];var f=[h,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"];var H=[h,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"];var A=[l,R,w,s+I,f,H];var e=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/;var C="str";var z="kwd";var j="com";var O="typ";var G="lit";var L="pun";var F="pln";var m="tag";var E="dec";var J="src";var P="atn";var n="atv";var N="nocode";var M="(?:^^\\.?|[+-]|\\!|\\!=|\\!==|\\#|\\%|\\%=|&|&&|&&=|&=|\\(|\\*|\\*=|\\+=|\\,|\\-=|\\->|\\/|\\/=|:|::|\\;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|\\?|\\@|\\[|\\^|\\^=|\\^\\^|\\^\\^=|\\{|\\||\\|=|\\|\\||\\|\\|=|\\~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*";function k(Z){var ad=0;var S=false;var ac=false;for(var 
V=0,U=Z.length;V122)){if(!(al<65||ag>90)){af.push([Math.max(65,ag)|32,Math.min(al,90)|32])}if(!(al<97||ag>122)){af.push([Math.max(97,ag)&~32,Math.min(al,122)&~32])}}}}af.sort(function(av,au){return(av[0]-au[0])||(au[1]-av[1])});var ai=[];var ap=[NaN,NaN];for(var ar=0;arat[0]){if(at[1]+1>at[0]){an.push("-")}an.push(T(at[1]))}}an.push("]");return an.join("")}function W(al){var aj=al.source.match(new RegExp("(?:\\[(?:[^\\x5C\\x5D]|\\\\[\\s\\S])*\\]|\\\\u[A-Fa-f0-9]{4}|\\\\x[A-Fa-f0-9]{2}|\\\\[0-9]+|\\\\[^ux0-9]|\\(\\?[:!=]|[\\(\\)\\^]|[^\\x5B\\x5C\\(\\)\\^]+)","g"));var ah=aj.length;var an=[];for(var ak=0,am=0;ak=2&&ai==="["){aj[ak]=X(ag)}else{if(ai!=="\\"){aj[ak]=ag.replace(/[a-zA-Z]/g,function(ao){var ap=ao.charCodeAt(0);return"["+String.fromCharCode(ap&~32,ap|32)+"]"})}}}}return aj.join("")}var aa=[];for(var V=0,U=Z.length;V=0;){S[ac.charAt(ae)]=Y}}var af=Y[1];var aa=""+af;if(!ag.hasOwnProperty(aa)){ah.push(af);ag[aa]=null}}ah.push(/[\0-\uffff]/);V=k(ah)})();var X=T.length;var W=function(ah){var Z=ah.sourceCode,Y=ah.basePos;var ad=[Y,F];var af=0;var an=Z.match(V)||[];var aj={};for(var ae=0,aq=an.length;ae=5&&"lang-"===ap.substring(0,5);if(am&&!(ai&&typeof ai[1]==="string")){am=false;ap=J}if(!am){aj[ag]=ap}}var ab=af;af+=ag.length;if(!am){ad.push(Y+ab,ap)}else{var al=ai[1];var ak=ag.indexOf(al);var ac=ak+al.length;if(ai[2]){ac=ag.length-ai[2].length;ak=ac-al.length}var ar=ap.substring(5);B(Y+ab,ag.substring(0,ak),W,ad);B(Y+ab+ak,al,q(ar,al),ad);B(Y+ab+ac,ag.substring(ac),W,ad)}}ah.decorations=ad};return W}function i(T){var 
W=[],S=[];if(T.tripleQuotedStrings){W.push([C,/^(?:\'\'\'(?:[^\'\\]|\\[\s\S]|\'{1,2}(?=[^\']))*(?:\'\'\'|$)|\"\"\"(?:[^\"\\]|\\[\s\S]|\"{1,2}(?=[^\"]))*(?:\"\"\"|$)|\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$))/,null,"'\""])}else{if(T.multiLineStrings){W.push([C,/^(?:\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$)|\`(?:[^\\\`]|\\[\s\S])*(?:\`|$))/,null,"'\"`"])}else{W.push([C,/^(?:\'(?:[^\\\'\r\n]|\\.)*(?:\'|$)|\"(?:[^\\\"\r\n]|\\.)*(?:\"|$))/,null,"\"'"])}}if(T.verbatimStrings){S.push([C,/^@\"(?:[^\"]|\"\")*(?:\"|$)/,null])}var Y=T.hashComments;if(Y){if(T.cStyleComments){if(Y>1){W.push([j,/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,null,"#"])}else{W.push([j,/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\r\n]*)/,null,"#"])}S.push([C,/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,null])}else{W.push([j,/^#[^\r\n]*/,null,"#"])}}if(T.cStyleComments){S.push([j,/^\/\/[^\r\n]*/,null]);S.push([j,/^\/\*[\s\S]*?(?:\*\/|$)/,null])}if(T.regexLiterals){var X=("/(?=[^/*])(?:[^/\\x5B\\x5C]|\\x5C[\\s\\S]|\\x5B(?:[^\\x5C\\x5D]|\\x5C[\\s\\S])*(?:\\x5D|$))+/");S.push(["lang-regex",new RegExp("^"+M+"("+X+")")])}var V=T.types;if(V){S.push([O,V])}var U=(""+T.keywords).replace(/^ | $/g,"");if(U.length){S.push([z,new RegExp("^(?:"+U.replace(/[\s,]+/g,"|")+")\\b"),null])}W.push([F,/^\s+/,null," \r\n\t\xA0"]);S.push([G,/^@[a-z_$][a-z_$@0-9]*/i,null],[O,/^(?:[@_]?[A-Z]+[a-z][A-Za-z_$@0-9]*|\w+_t\b)/,null],[F,/^[a-z_$][a-z_$@0-9]*/i,null],[G,new RegExp("^(?:0x[a-f0-9]+|(?:\\d(?:_\\d+)*\\d*(?:\\.\\d*)?|\\.\\d\\+)(?:e[+\\-]?\\d+)?)[a-z]*","i"),null,"0123456789"],[F,/^\\[\s\S]?/,null],[L,/^.[^\s\w\.$@\'\"\`\/\#\\]*/,null]);return g(W,S)}var K=i({keywords:A,hashComments:true,cStyleComments:true,multiLineStrings:true,regexLiterals:true});function Q(V,ag){var U=/(?:^|\s)nocode(?:\s|$)/;var ab=/\r\n?|\n/;var ac=V.ownerDocument;var 
S;if(V.currentStyle){S=V.currentStyle.whiteSpace}else{if(window.getComputedStyle){S=ac.defaultView.getComputedStyle(V,null).getPropertyValue("white-space")}}var Z=S&&"pre"===S.substring(0,3);var af=ac.createElement("LI");while(V.firstChild){af.appendChild(V.firstChild)}var W=[af];function ae(al){switch(al.nodeType){case 1:if(U.test(al.className)){break}if("BR"===al.nodeName){ad(al);if(al.parentNode){al.parentNode.removeChild(al)}}else{for(var an=al.firstChild;an;an=an.nextSibling){ae(an)}}break;case 3:case 4:if(Z){var am=al.nodeValue;var aj=am.match(ab);if(aj){var ai=am.substring(0,aj.index);al.nodeValue=ai;var ah=am.substring(aj.index+aj[0].length);if(ah){var ak=al.parentNode;ak.insertBefore(ac.createTextNode(ah),al.nextSibling)}ad(al);if(!ai){al.parentNode.removeChild(al)}}}break}}function ad(ak){while(!ak.nextSibling){ak=ak.parentNode;if(!ak){return}}function ai(al,ar){var aq=ar?al.cloneNode(false):al;var ao=al.parentNode;if(ao){var ap=ai(ao,1);var an=al.nextSibling;ap.appendChild(aq);for(var am=an;am;am=an){an=am.nextSibling;ap.appendChild(am)}}return aq}var ah=ai(ak.nextSibling,0);for(var aj;(aj=ah.parentNode)&&aj.nodeType===1;){ah=aj}W.push(ah)}for(var Y=0;Y=S){ah+=2}if(V>=ap){Z+=2}}}var t={};function c(U,V){for(var S=V.length;--S>=0;){var T=V[S];if(!t.hasOwnProperty(T)){t[T]=U}else{if(window.console){console.warn("cannot override language handler %s",T)}}}}function q(T,S){if(!(T&&t.hasOwnProperty(T))){T=/^\s*]*(?:>|$)/],[j,/^<\!--[\s\S]*?(?:-\->|$)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],[L,/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]),["default-markup","htm","html","mxml","xhtml","xml","xsl"]);c(g([[F,/^[\s]+/,null," 
\t\r\n"],[n,/^(?:\"[^\"]*\"?|\'[^\']*\'?)/,null,"\"'"]],[[m,/^^<\/?[a-z](?:[\w.:-]*\w)?|\/?>$/i],[P,/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^>\'\"\s]*(?:[^>\'\"\s\/]|\/(?=\s)))/],[L,/^[=<>\/]+/],["lang-js",/^on\w+\s*=\s*\"([^\"]+)\"/i],["lang-js",/^on\w+\s*=\s*\'([^\']+)\'/i],["lang-js",/^on\w+\s*=\s*([^\"\'>\s]+)/i],["lang-css",/^style\s*=\s*\"([^\"]+)\"/i],["lang-css",/^style\s*=\s*\'([^\']+)\'/i],["lang-css",/^style\s*=\s*([^\"\'>\s]+)/i]]),["in.tag"]);c(g([],[[n,/^[\s\S]+/]]),["uq.val"]);c(i({keywords:l,hashComments:true,cStyleComments:true,types:e}),["c","cc","cpp","cxx","cyc","m"]);c(i({keywords:"null,true,false"}),["json"]);c(i({keywords:R,hashComments:true,cStyleComments:true,verbatimStrings:true,types:e}),["cs"]);c(i({keywords:x,cStyleComments:true}),["java"]);c(i({keywords:H,hashComments:true,multiLineStrings:true}),["bsh","csh","sh"]);c(i({keywords:I,hashComments:true,multiLineStrings:true,tripleQuotedStrings:true}),["cv","py"]);c(i({keywords:s,hashComments:true,multiLineStrings:true,regexLiterals:true}),["perl","pl","pm"]);c(i({keywords:f,hashComments:true,multiLineStrings:true,regexLiterals:true}),["rb"]);c(i({keywords:w,cStyleComments:true,regexLiterals:true}),["js"]);c(i({keywords:r,hashComments:3,cStyleComments:true,multilineStrings:true,tripleQuotedStrings:true,regexLiterals:true}),["coffee"]);c(g([],[[C,/^[\s\S]+/]]),["regex"]);function d(V){var U=V.langExtension;try{var S=a(V.sourceNode);var T=S.sourceCode;V.sourceCode=T;V.spans=S.spans;V.basePos=0;q(U,T)(V);D(V)}catch(W){if("console" in window){console.log(W&&W.stack?W.stack:W)}}}function y(W,V,U){var S=document.createElement("PRE");S.innerHTML=W;if(U){Q(S,U)}var T={langExtension:V,numberLines:U,sourceNode:S};d(T);return S.innerHTML}function b(ad){function Y(af){return document.getElementsByTagName(af)}var ac=[Y("pre"),Y("code"),Y("xmp")];var T=[];for(var aa=0;aa=0){var ah=ai.match(ab);var 
am;if(!ah&&(am=o(aj))&&"CODE"===am.tagName){ah=am.className.match(ab)}if(ah){ah=ah[1]}var al=false;for(var ak=aj.parentNode;ak;ak=ak.parentNode){if((ak.tagName==="pre"||ak.tagName==="code"||ak.tagName==="xmp")&&ak.className&&ak.className.indexOf("prettyprint")>=0){al=true;break}}if(!al){var af=aj.className.match(/\blinenums\b(?::(\d+))?/);af=af?af[1]&&af[1].length?+af[1]:true:false;if(af){Q(aj,af)}S={langExtension:ah,sourceNode:aj,numberLines:af};d(S)}}}if(X]*(?:>|$)/],[PR.PR_COMMENT,/^<\!--[\s\S]*?(?:-\->|$)/],[PR.PR_PUNCTUATION,/^(?:<[%?]|[%?]>)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-handlebars",/^]*type\s*=\s*['"]?text\/x-handlebars-template['"]?\b[^>]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i],[PR.PR_DECLARATION,/^{{[#^>/]?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{&?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{{>?\s*[\w.][^}]*}}}/],[PR.PR_COMMENT,/^{{![^}]*}}/]]),["handlebars","hbs"]);PR.registerLangHandler(PR.createSimpleLexer([[PR.PR_PLAIN,/^[ \t\r\n\f]+/,null," \t\r\n\f"]],[[PR.PR_STRING,/^\"(?:[^\n\r\f\\\"]|\\(?:\r\n?|\n|\f)|\\[\s\S])*\"/,null],[PR.PR_STRING,/^\'(?:[^\n\r\f\\\']|\\(?:\r\n?|\n|\f)|\\[\s\S])*\'/,null],["lang-css-str",/^url\(([^\)\"\']*)\)/i],[PR.PR_KEYWORD,/^(?:url|rgb|\!important|@import|@page|@media|@charset|inherit)(?=[^\-\w]|$)/i,null],["lang-css-kw",/^(-?(?:[_a-z]|(?:\\[0-9a-f]+ ?))(?:[_a-z0-9\-]|\\(?:\\[0-9a-f]+ ?))*)\s*:/i],[PR.PR_COMMENT,/^\/\*[^*]*\*+(?:[^\/*][^*]*\*+)*\//],[PR.PR_COMMENT,/^(?:)/],[PR.PR_LITERAL,/^(?:\d+|\d*\.\d+)(?:%|[a-z]+)?/i],[PR.PR_LITERAL,/^#(?:[0-9a-f]{3}){1,2}/i],[PR.PR_PLAIN,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i],[PR.PR_PUNCTUATION,/^[^\s\w\'\"]+/]]),["css"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_KEYWORD,/^-?(?:[_a-z]|(?:\\[\da-f]+ 
?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i]]),["css-kw"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_STRING,/^[^\)\"\']+/]]),["css-str"]); diff --git a/coverage/lcov-report/sort-arrow-sprite.png b/coverage/lcov-report/sort-arrow-sprite.png new file mode 100644 index 00000000..6ed68316 Binary files /dev/null and b/coverage/lcov-report/sort-arrow-sprite.png differ diff --git a/coverage/lcov-report/sorter.js b/coverage/lcov-report/sorter.js new file mode 100644 index 00000000..4ed70ae5 --- /dev/null +++ b/coverage/lcov-report/sorter.js @@ -0,0 +1,210 @@ +/* eslint-disable */ +var addSorting = (function() { + 'use strict'; + var cols, + currentSort = { + index: 0, + desc: false + }; + + // returns the summary table element + function getTable() { + return document.querySelector('.coverage-summary'); + } + // returns the thead element of the summary table + function getTableHeader() { + return getTable().querySelector('thead tr'); + } + // returns the tbody element of the summary table + function getTableBody() { + return getTable().querySelector('tbody'); + } + // returns the th element for nth column + function getNthColumn(n) { + return getTableHeader().querySelectorAll('th')[n]; + } + + function onFilterInput() { + const searchValue = document.getElementById('fileSearch').value; + const rows = document.getElementsByTagName('tbody')[0].children; + + // Try to create a RegExp from the searchValue. 
If it fails (invalid regex), + // it will be treated as a plain text search + let searchRegex; + try { + searchRegex = new RegExp(searchValue, 'i'); // 'i' for case-insensitive + } catch (error) { + searchRegex = null; + } + + for (let i = 0; i < rows.length; i++) { + const row = rows[i]; + let isMatch = false; + + if (searchRegex) { + // If a valid regex was created, use it for matching + isMatch = searchRegex.test(row.textContent); + } else { + // Otherwise, fall back to the original plain text search + isMatch = row.textContent + .toLowerCase() + .includes(searchValue.toLowerCase()); + } + + row.style.display = isMatch ? '' : 'none'; + } + } + + // loads the search box + function addSearchBox() { + var template = document.getElementById('filterTemplate'); + var templateClone = template.content.cloneNode(true); + templateClone.getElementById('fileSearch').oninput = onFilterInput; + template.parentElement.appendChild(templateClone); + } + + // loads all columns + function loadColumns() { + var colNodes = getTableHeader().querySelectorAll('th'), + colNode, + cols = [], + col, + i; + + for (i = 0; i < colNodes.length; i += 1) { + colNode = colNodes[i]; + col = { + key: colNode.getAttribute('data-col'), + sortable: !colNode.getAttribute('data-nosort'), + type: colNode.getAttribute('data-type') || 'string' + }; + cols.push(col); + if (col.sortable) { + col.defaultDescSort = col.type === 'number'; + colNode.innerHTML = + colNode.innerHTML + ''; + } + } + return cols; + } + // attaches a data attribute to every tr element with an object + // of data values keyed by column name + function loadRowData(tableRow) { + var tableCols = tableRow.querySelectorAll('td'), + colNode, + col, + data = {}, + i, + val; + for (i = 0; i < tableCols.length; i += 1) { + colNode = tableCols[i]; + col = cols[i]; + val = colNode.getAttribute('data-value'); + if (col.type === 'number') { + val = Number(val); + } + data[col.key] = val; + } + return data; + } + // loads all row data + function 
loadData() { + var rows = getTableBody().querySelectorAll('tr'), + i; + + for (i = 0; i < rows.length; i += 1) { + rows[i].data = loadRowData(rows[i]); + } + } + // sorts the table using the data for the ith column + function sortByIndex(index, desc) { + var key = cols[index].key, + sorter = function(a, b) { + a = a.data[key]; + b = b.data[key]; + return a < b ? -1 : a > b ? 1 : 0; + }, + finalSorter = sorter, + tableBody = document.querySelector('.coverage-summary tbody'), + rowNodes = tableBody.querySelectorAll('tr'), + rows = [], + i; + + if (desc) { + finalSorter = function(a, b) { + return -1 * sorter(a, b); + }; + } + + for (i = 0; i < rowNodes.length; i += 1) { + rows.push(rowNodes[i]); + tableBody.removeChild(rowNodes[i]); + } + + rows.sort(finalSorter); + + for (i = 0; i < rows.length; i += 1) { + tableBody.appendChild(rows[i]); + } + } + // removes sort indicators for current column being sorted + function removeSortIndicators() { + var col = getNthColumn(currentSort.index), + cls = col.className; + + cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, ''); + col.className = cls; + } + // adds sort indicators for current column being sorted + function addSortIndicators() { + getNthColumn(currentSort.index).className += currentSort.desc + ? 
' sorted-desc' + : ' sorted'; + } + // adds event listeners for all sorter widgets + function enableUI() { + var i, + el, + ithSorter = function ithSorter(i) { + var col = cols[i]; + + return function() { + var desc = col.defaultDescSort; + + if (currentSort.index === i) { + desc = !currentSort.desc; + } + sortByIndex(i, desc); + removeSortIndicators(); + currentSort.index = i; + currentSort.desc = desc; + addSortIndicators(); + }; + }; + for (i = 0; i < cols.length; i += 1) { + if (cols[i].sortable) { + // add the click event handler on the th so users + // dont have to click on those tiny arrows + el = getNthColumn(i).querySelector('.sorter').parentElement; + if (el.addEventListener) { + el.addEventListener('click', ithSorter(i)); + } else { + el.attachEvent('onclick', ithSorter(i)); + } + } + } + } + // adds sorting functionality to the UI + return function() { + if (!getTable()) { + return; + } + cols = loadColumns(); + loadData(); + addSearchBox(); + addSortIndicators(); + enableUI(); + }; +})(); + +window.addEventListener('load', addSorting); diff --git a/coverage/lcov-report/sum.js.html b/coverage/lcov-report/sum.js.html new file mode 100644 index 00000000..5fd66632 --- /dev/null +++ b/coverage/lcov-report/sum.js.html @@ -0,0 +1,94 @@ + + + + + + Code coverage report for sum.js + + + + + + + + + +
+
+

All files sum.js

+
+ +
+ 100% + Statements + 4/4 +
+ + +
+ 100% + Branches + 1/1 +
+ + +
+ 100% + Functions + 1/1 +
+ + +
+ 100% + Lines + 4/4 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +41x +1x +1x +1x
function sum(a, b) {
+    return a + b;
+  }
+  module.exports = sum;
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/coverage/lcov.info b/coverage/lcov.info new file mode 100644 index 00000000..67d7bb6f --- /dev/null +++ b/coverage/lcov.info @@ -0,0 +1,16 @@ +TN: +SF:sum.js +FN:1,sum +FNF:1 +FNH:1 +FNDA:1,sum +DA:1,1 +DA:2,1 +DA:3,1 +DA:4,1 +LF:4 +LH:4 +BRDA:1,0,0,1 +BRF:1 +BRH:1 +end_of_record diff --git a/facebook-tutorial b/facebook-tutorial new file mode 160000 index 00000000..cb8c9927 --- /dev/null +++ b/facebook-tutorial @@ -0,0 +1 @@ +Subproject commit cb8c9927541c8285a805a353176c721213a896a9 diff --git a/index.html b/index.html index 46030f8b..5d65ed6f 100644 --- a/index.html +++ b/index.html @@ -4,7 +4,7 @@ - Movies + Movie Company
diff --git a/install.sh b/install.sh new file mode 100644 index 00000000..68d70f49 --- /dev/null +++ b/install.sh @@ -0,0 +1,50 @@ +#!/bin/bash +set -e -o pipefail +shopt -s nocaseglob + +OUT_FILE=/usr/local/bin/autorestic + +# Type +NATIVE_OS=$(uname | tr '[:upper:]' '[:lower:]') +if [[ $NATIVE_OS == *"linux"* ]]; then + OS=linux +elif [[ $NATIVE_OS == *"darwin"* ]]; then + OS=darwin +elif [[ $NATIVE_OS == *"freebsd"* ]]; then + OS=freebsd +else + echo "Could not determine OS automatically, please check the release page manually: https://github.com/cupcakearmy/autorestic/releases" + exit 1 +fi +echo $OS + +NATIVE_ARCH=$(uname -m | tr '[:upper:]' '[:lower:]') +if [[ $NATIVE_ARCH == *"x86_64"* || $NATIVE_ARCH == *"amd64"* ]]; then + ARCH=amd64 +elif [[ $NATIVE_ARCH == *"arm64"* || $NATIVE_ARCH == *"aarch64"* ]]; then + ARCH=arm64 +elif [[ $NATIVE_ARCH == *"x86"* ]]; then + ARCH=386 +elif [[ $NATIVE_ARCH == *"armv7"* ]]; then + ARCH=arm +else + echo "Could not determine Architecure automatically, please check the release page manually: https://github.com/cupcakearmy/autorestic/releases" + exit 1 +fi +echo $ARCH + +if ! command -v bzip2 &>/dev/null; then + echo "Missing bzip2 command. Please install the bzip2 package for your system." 
+ exit 1 +fi + +wget -qO - https://api.github.com/repos/cupcakearmy/autorestic/releases/latest \ +| grep "browser_download_url.*_${OS}_${ARCH}" \ +| cut -d : -f 2,3 \ +| tr -d \" \ +| wget -O "${OUT_FILE}.bz2" -i - +bzip2 -fd "${OUT_FILE}.bz2" +chmod +x ${OUT_FILE} + +autorestic install +echo "Successfully installed autorestic" diff --git a/jest.config.ts b/jest.config.ts new file mode 100644 index 00000000..71a89149 --- /dev/null +++ b/jest.config.ts @@ -0,0 +1,201 @@ +/** + * For a detailed explanation regarding each configuration property, visit: + * https://jestjs.io/docs/configuration + */ + +import type {Config} from 'jest'; + +const config: Config = { + // All imported modules in your tests should be mocked automatically + // automock: false, + + // Stop running tests after `n` failures + // bail: 0, + + // The directory where Jest should store its cached dependency information + // cacheDirectory: "/tmp/jest_rs", + + // Automatically clear mock calls, instances, contexts and results before every test + clearMocks: true, + + // Indicates whether the coverage information should be collected while executing the test + collectCoverage: true, + + // An array of glob patterns indicating a set of files for which coverage information should be collected + // collectCoverageFrom: undefined, + + // The directory where Jest should output its coverage files + coverageDirectory: "coverage", + + // An array of regexp pattern strings used to skip coverage collection + // coveragePathIgnorePatterns: [ + // "/node_modules/" + // ], + + // Indicates which provider should be used to instrument code for coverage + coverageProvider: "v8", + + // A list of reporter names that Jest uses when writing coverage reports + // coverageReporters: [ + // "json", + // "text", + // "lcov", + // "clover" + // ], + + // An object that configures minimum threshold enforcement for coverage results + // coverageThreshold: undefined, + + // A path to a custom dependency extractor + // 
dependencyExtractor: undefined, + + // Make calling deprecated APIs throw helpful error messages + // errorOnDeprecated: false, + + // The default configuration for fake timers + // fakeTimers: { + // "enableGlobally": false + // }, + + // Force coverage collection from ignored files using an array of glob patterns + // forceCoverageMatch: [], + + // A path to a module which exports an async function that is triggered once before all test suites + // globalSetup: undefined, + + // A path to a module which exports an async function that is triggered once after all test suites + // globalTeardown: undefined, + + // A set of global variables that need to be available in all test environments + // globals: {}, + + // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers. + // maxWorkers: "50%", + + // An array of directory names to be searched recursively up from the requiring module's location + // moduleDirectories: [ + // "node_modules" + // ], + + // An array of file extensions your modules use + // moduleFileExtensions: [ + // "js", + // "mjs", + // "cjs", + // "jsx", + // "ts", + // "mts", + // "cts", + // "tsx", + // "json", + // "node" + // ], + + // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module + // moduleNameMapper: {}, + + // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader + // modulePathIgnorePatterns: [], + + // Activates notifications for test results + // notify: false, + + // An enum that specifies notification mode. 
Requires { notify: true } + // notifyMode: "failure-change", + + // A preset that is used as a base for Jest's configuration + // preset: undefined, + + // Run tests from one or more projects + // projects: undefined, + + // Use this configuration option to add custom reporters to Jest + // reporters: undefined, + + // Automatically reset mock state before every test + // resetMocks: false, + + // Reset the module registry before running each individual test + // resetModules: false, + + // A path to a custom resolver + // resolver: undefined, + + // Automatically restore mock state and implementation before every test + // restoreMocks: false, + + // The root directory that Jest should scan for tests and modules within + // rootDir: undefined, + + // A list of paths to directories that Jest should use to search for files in + // roots: [ + // "" + // ], + + // Allows you to use a custom runner instead of Jest's default test runner + // runner: "jest-runner", + + // The paths to modules that run some code to configure or set up the testing environment before each test + // setupFiles: [], + + // A list of paths to modules that run some code to configure or set up the testing framework before each test + // setupFilesAfterEnv: [], + + // The number of seconds after which a test is considered as slow and reported as such in the results. 
+ // slowTestThreshold: 5, + + // A list of paths to snapshot serializer modules Jest should use for snapshot testing + // snapshotSerializers: [], + + // The test environment that will be used for testing + // testEnvironment: "jest-environment-node", + + // Options that will be passed to the testEnvironment + // testEnvironmentOptions: {}, + + // Adds a location field to test results + // testLocationInResults: false, + + // The glob patterns Jest uses to detect test files + // testMatch: [ + // "**/__tests__/**/*.?([mc])[jt]s?(x)", + // "**/?(*.)+(spec|test).?([mc])[jt]s?(x)" + // ], + + // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped + // testPathIgnorePatterns: [ + // "/node_modules/" + // ], + + // The regexp pattern or array of patterns that Jest uses to detect test files + // testRegex: [], + + // This option allows the use of a custom results processor + // testResultsProcessor: undefined, + + // This option allows use of a custom test runner + // testRunner: "jest-circus/runner", + + // A map from regular expressions to paths to transformers + // transform: undefined, + + // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation + // transformIgnorePatterns: [ + // "/node_modules/", + // "\\.pnp\\.[^\\/]+$" + // ], + + // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them + // unmockedModulePathPatterns: undefined, + + // Indicates whether each individual test should be reported during the run + // verbose: undefined, + + // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode + // watchPathIgnorePatterns: [], + + // Whether to use watchman for file crawling + // watchman: true, +}; + +export default config; diff --git a/login.ejs b/login.ejs new file mode 100644 index 00000000..200208c1 
--- /dev/null +++ b/login.ejs @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + +

Sign in

+ \ No newline at end of file diff --git a/name-of-your-project/.gitignore b/name-of-your-project/.gitignore new file mode 100644 index 00000000..7fc73163 --- /dev/null +++ b/name-of-your-project/.gitignore @@ -0,0 +1,35 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? +# Environment variables +.env +.env.local +.env.*.local + +# Database +*.db +sessions.db + +# Node +node_modules/ \ No newline at end of file diff --git a/name-of-your-project/README.md b/name-of-your-project/README.md new file mode 100644 index 00000000..7059a962 --- /dev/null +++ b/name-of-your-project/README.md @@ -0,0 +1,12 @@ +# React + Vite + +This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules. + +Currently, two official plugins are available: + +- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) for Fast Refresh +- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh + +## Expanding the ESLint configuration + +If you are developing a production application, we recommend using TypeScript with type-aware lint rules enabled. Check out the [TS template](https://github.com/vitejs/vite/tree/main/packages/create-vite/template-react-ts) for information on how to integrate TypeScript and [`typescript-eslint`](https://typescript-eslint.io) in your project. 
diff --git a/name-of-your-project/eslint.config.js b/name-of-your-project/eslint.config.js new file mode 100644 index 00000000..cee1e2c7 --- /dev/null +++ b/name-of-your-project/eslint.config.js @@ -0,0 +1,29 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import { defineConfig, globalIgnores } from 'eslint/config' + +export default defineConfig([ + globalIgnores(['dist']), + { + files: ['**/*.{js,jsx}'], + extends: [ + js.configs.recommended, + reactHooks.configs['recommended-latest'], + reactRefresh.configs.vite, + ], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + parserOptions: { + ecmaVersion: 'latest', + ecmaFeatures: { jsx: true }, + sourceType: 'module', + }, + }, + rules: { + 'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }], + }, + }, +]) diff --git a/name-of-your-project/index.html b/name-of-your-project/index.html new file mode 100644 index 00000000..0c589ecc --- /dev/null +++ b/name-of-your-project/index.html @@ -0,0 +1,13 @@ + + + + + + + Vite + React + + +
+ + + diff --git a/name-of-your-project/package.json b/name-of-your-project/package.json new file mode 100644 index 00000000..38215191 --- /dev/null +++ b/name-of-your-project/package.json @@ -0,0 +1,27 @@ +{ + "name": "name-of-your-project", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "react": "^19.1.1", + "react-dom": "^19.1.1" + }, + "devDependencies": { + "@eslint/js": "^9.33.0", + "@types/react": "^19.1.10", + "@types/react-dom": "^19.1.7", + "@vitejs/plugin-react": "^5.0.0", + "eslint": "^9.33.0", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.20", + "globals": "^16.3.0", + "vite": "^7.1.2" + } +} diff --git a/name-of-your-project/public/vite.svg b/name-of-your-project/public/vite.svg new file mode 100644 index 00000000..e7b8dfb1 --- /dev/null +++ b/name-of-your-project/public/vite.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/name-of-your-project/src/App.css b/name-of-your-project/src/App.css new file mode 100644 index 00000000..b9d355df --- /dev/null +++ b/name-of-your-project/src/App.css @@ -0,0 +1,42 @@ +#root { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; +} + +.logo { + height: 6em; + padding: 1.5em; + will-change: filter; + transition: filter 300ms; +} +.logo:hover { + filter: drop-shadow(0 0 2em #646cffaa); +} +.logo.react:hover { + filter: drop-shadow(0 0 2em #61dafbaa); +} + +@keyframes logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} + +@media (prefers-reduced-motion: no-preference) { + a:nth-of-type(2) .logo { + animation: logo-spin infinite 20s linear; + } +} + +.card { + padding: 2em; +} + +.read-the-docs { + color: #888; +} diff --git a/name-of-your-project/src/App.jsx b/name-of-your-project/src/App.jsx new file mode 100644 index 00000000..f67355ae --- /dev/null +++ 
b/name-of-your-project/src/App.jsx @@ -0,0 +1,35 @@ +import { useState } from 'react' +import reactLogo from './assets/react.svg' +import viteLogo from '/vite.svg' +import './App.css' + +function App() { + const [count, setCount] = useState(0) + + return ( + <> + +

Vite + React

+
+ +

+ Edit src/App.jsx and save to test HMR +

+
+

+ Click on the Vite and React logos to learn more +

+ + ) +} + +export default App diff --git a/name-of-your-project/src/assets/react.svg b/name-of-your-project/src/assets/react.svg new file mode 100644 index 00000000..6c87de9b --- /dev/null +++ b/name-of-your-project/src/assets/react.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/name-of-your-project/src/index.css b/name-of-your-project/src/index.css new file mode 100644 index 00000000..08a3ac9e --- /dev/null +++ b/name-of-your-project/src/index.css @@ -0,0 +1,68 @@ +:root { + font-family: system-ui, Avenir, Helvetica, Arial, sans-serif; + line-height: 1.5; + font-weight: 400; + + color-scheme: light dark; + color: rgba(255, 255, 255, 0.87); + background-color: #242424; + + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; +} +a:hover { + color: #535bf2; +} + +body { + margin: 0; + display: flex; + place-items: center; + min-width: 320px; + min-height: 100vh; +} + +h1 { + font-size: 3.2em; + line-height: 1.1; +} + +button { + border-radius: 8px; + border: 1px solid transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; +} +button:hover { + border-color: #646cff; +} +button:focus, +button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; +} + +@media (prefers-color-scheme: light) { + :root { + color: #213547; + background-color: #ffffff; + } + a:hover { + color: #747bff; + } + button { + background-color: #f9f9f9; + } +} diff --git a/name-of-your-project/src/main.jsx b/name-of-your-project/src/main.jsx new file mode 100644 index 00000000..b9a1a6de --- /dev/null +++ b/name-of-your-project/src/main.jsx @@ -0,0 +1,10 @@ +import { StrictMode } from 'react' +import { createRoot } from 'react-dom/client' +import './index.css' +import App from './App.jsx' + 
+createRoot(document.getElementById('root')).render( + + + , +) diff --git a/name-of-your-project/vite.config.js b/name-of-your-project/vite.config.js new file mode 100644 index 00000000..8b0f57b9 --- /dev/null +++ b/name-of-your-project/vite.config.js @@ -0,0 +1,7 @@ +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' + +// https://vite.dev/config/ +export default defineConfig({ + plugins: [react()], +}) diff --git a/node/assert.js b/node/assert.js new file mode 100644 index 00000000..2ddc44d9 --- /dev/null +++ b/node/assert.js @@ -0,0 +1,824 @@ +'use strict'; + +const { + ArrayPrototypeIndexOf, + ArrayPrototypeJoin, + ArrayPrototypePush, + ArrayPrototypeSlice, + Error, + NumberIsNaN, + ObjectAssign, + ObjectIs, + ObjectKeys, + ObjectPrototypeIsPrototypeOf, + ReflectApply, + RegExpPrototypeExec, + String, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeSplit, +} = primordials; + +const { + codes: { + ERR_AMBIGUOUS_ARGUMENT, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_RETURN_VALUE, + ERR_MISSING_ARGS, + }, +} = require('internal/errors'); +const AssertionError = require('internal/assert/assertion_error'); +const { inspect } = require('internal/util/inspect'); +const { + isPromise, + isRegExp, +} = require('internal/util/types'); +const { isError, deprecate } = require('internal/util'); +const { innerOk } = require('internal/assert/utils'); + +const CallTracker = require('internal/assert/calltracker'); +const { + validateFunction, +} = require('internal/validators'); + +let isDeepEqual; +let isDeepStrictEqual; +let isPartialStrictEqual; + +function lazyLoadComparison() { + const comparison = require('internal/util/comparisons'); + isDeepEqual = comparison.isDeepEqual; + isDeepStrictEqual = comparison.isDeepStrictEqual; + isPartialStrictEqual = comparison.isPartialStrictEqual; +} + +let warned = false; + +// The assert module provides functions that throw +// AssertionError's when particular conditions 
are not met. The +// assert module must conform to the following interface. + +const assert = module.exports = ok; + +const NO_EXCEPTION_SENTINEL = {}; + +// All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function innerFail(obj) { + if (obj.message instanceof Error) throw obj.message; + + throw new AssertionError(obj); +} + +/** + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @param {string} [operator] + * @param {Function} [stackStartFn] + */ +function fail(actual, expected, message, operator, stackStartFn) { + const argsLen = arguments.length; + + let internalMessage = false; + if (actual == null && argsLen <= 1) { + internalMessage = true; + message = 'Failed'; + } else if (argsLen === 1) { + message = actual; + actual = undefined; + } else { + if (warned === false) { + warned = true; + process.emitWarning( + 'assert.fail() with more than one argument is deprecated. ' + + 'Please use assert.strictEqual() instead or only pass a message.', + 'DeprecationWarning', + 'DEP0094', + ); + } + if (argsLen === 2) + operator = '!='; + } + + if (message instanceof Error) throw message; + + const errArgs = { + actual, + expected, + operator: operator === undefined ? 'fail' : operator, + stackStartFn: stackStartFn || fail, + message, + }; + const err = new AssertionError(errArgs); + if (internalMessage) { + err.generatedMessage = true; + } + throw err; +} + +assert.fail = fail; + +// The AssertionError is defined in internal/error. +assert.AssertionError = AssertionError; + +/** + * Pure assertion tests whether a value is truthy, as determined + * by !!value. 
+ * @param {...any} args + * @returns {void} + */ +function ok(...args) { + innerOk(ok, args.length, ...args); +} +assert.ok = ok; + +/** + * The equality assertion tests shallow, coercive equality with ==. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +/* eslint-disable no-restricted-properties */ +assert.equal = function equal(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + // eslint-disable-next-line eqeqeq + if (actual != expected && (!NumberIsNaN(actual) || !NumberIsNaN(expected))) { + innerFail({ + actual, + expected, + message, + operator: '==', + stackStartFn: equal, + }); + } +}; + +/** + * The non-equality assertion tests for whether two objects are not + * equal with !=. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notEqual = function notEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + // eslint-disable-next-line eqeqeq + if (actual == expected || (NumberIsNaN(actual) && NumberIsNaN(expected))) { + innerFail({ + actual, + expected, + message, + operator: '!=', + stackStartFn: notEqual, + }); + } +}; + +/** + * The deep equivalence assertion tests a deep equality relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.deepEqual = function deepEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isDeepEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'deepEqual', + stackStartFn: deepEqual, + }); + } +}; + +/** + * The deep non-equivalence assertion tests for any deep inequality. 
+ * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (isDeepEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notDeepEqual', + stackStartFn: notDeepEqual, + }); + } +}; +/* eslint-enable */ + +/** + * The deep strict equivalence assertion tests a deep strict equality + * relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isDeepStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'deepStrictEqual', + stackStartFn: deepStrictEqual, + }); + } +}; + +/** + * The deep strict non-equivalence assertion tests for any deep strict + * inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notDeepStrictEqual = notDeepStrictEqual; +function notDeepStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (isDeepStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notDeepStrictEqual', + stackStartFn: notDeepStrictEqual, + }); + } +} + +/** + * The strict equivalence assertion tests a strict equality relation. 
+ * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.strictEqual = function strictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (!ObjectIs(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'strictEqual', + stackStartFn: strictEqual, + }); + } +}; + +/** + * The strict non-equivalence assertion tests for any strict inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (ObjectIs(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notStrictEqual', + stackStartFn: notStrictEqual, + }); + } +}; + +/** + * The strict equivalence assertion test between two objects + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.partialDeepStrictEqual = function partialDeepStrictEqual( + actual, + expected, + message, +) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isPartialStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'partialDeepStrictEqual', + stackStartFn: partialDeepStrictEqual, + }); + } +}; + +class Comparison { + constructor(obj, keys, actual) { + for (const key of keys) { + if (key in obj) { + if (actual !== undefined && + typeof actual[key] === 'string' && + isRegExp(obj[key]) && + RegExpPrototypeExec(obj[key], actual[key]) !== null) { + this[key] = actual[key]; + } else { + this[key] = obj[key]; + } + } + } + } +} + +function compareExceptionKey(actual, expected, key, message, keys, fn) { + if (!(key in actual) || 
!isDeepStrictEqual(actual[key], expected[key])) { + if (!message) { + // Create placeholder objects to create a nice output. + const a = new Comparison(actual, keys); + const b = new Comparison(expected, keys, actual); + + const err = new AssertionError({ + actual: a, + expected: b, + operator: 'deepStrictEqual', + stackStartFn: fn, + }); + err.actual = actual; + err.expected = expected; + err.operator = fn.name; + throw err; + } + innerFail({ + actual, + expected, + message, + operator: fn.name, + stackStartFn: fn, + }); + } +} + +function expectedException(actual, expected, message, fn) { + let generatedMessage = false; + let throwError = false; + + if (typeof expected !== 'function') { + // Handle regular expressions. + if (isRegExp(expected)) { + const str = String(actual); + if (RegExpPrototypeExec(expected, str) !== null) + return; + + if (!message) { + generatedMessage = true; + message = 'The input did not match the regular expression ' + + `${inspect(expected)}. Input:\n\n${inspect(str)}\n`; + } + throwError = true; + // Handle primitives properly. + } else if (typeof actual !== 'object' || actual === null) { + const err = new AssertionError({ + actual, + expected, + message, + operator: 'deepStrictEqual', + stackStartFn: fn, + }); + err.operator = fn.name; + throw err; + } else { + // Handle validation objects. + const keys = ObjectKeys(expected); + // Special handle errors to make sure the name and the message are + // compared as well. 
+ if (expected instanceof Error) { + ArrayPrototypePush(keys, 'name', 'message'); + } else if (keys.length === 0) { + throw new ERR_INVALID_ARG_VALUE('error', + expected, 'may not be an empty object'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + for (const key of keys) { + if (typeof actual[key] === 'string' && + isRegExp(expected[key]) && + RegExpPrototypeExec(expected[key], actual[key]) !== null) { + continue; + } + compareExceptionKey(actual, expected, key, message, keys, fn); + } + return; + } + // Guard instanceof against arrow functions as they don't have a prototype. + // Check for matching Error classes. + } else if (expected.prototype !== undefined && actual instanceof expected) { + return; + } else if (ObjectPrototypeIsPrototypeOf(Error, expected)) { + if (!message) { + generatedMessage = true; + message = 'The error is expected to be an instance of ' + + `"${expected.name}". Received `; + if (isError(actual)) { + const name = (actual.constructor?.name) || + actual.name; + if (expected.name === name) { + message += 'an error with identical name but a different prototype.'; + } else { + message += `"${name}"`; + } + if (actual.message) { + message += `\n\nError message:\n\n${actual.message}`; + } + } else { + message += `"${inspect(actual, { depth: -1 })}"`; + } + } + throwError = true; + } else { + // Check validation functions return value. + const res = ReflectApply(expected, {}, [actual]); + if (res !== true) { + if (!message) { + generatedMessage = true; + const name = expected.name ? `"${expected.name}" ` : ''; + message = `The ${name}validation function is expected to return` + + ` "true". 
Received ${inspect(res)}`; + + if (isError(actual)) { + message += `\n\nCaught error:\n\n${actual}`; + } + } + throwError = true; + } + } + + if (throwError) { + const err = new AssertionError({ + actual, + expected, + message, + operator: fn.name, + stackStartFn: fn, + }); + err.generatedMessage = generatedMessage; + throw err; + } +} + +function getActual(fn) { + validateFunction(fn, 'fn'); + try { + fn(); + } catch (e) { + return e; + } + return NO_EXCEPTION_SENTINEL; +} + +function checkIsPromise(obj) { + // Accept native ES6 promises and promises that are implemented in a similar + // way. Do not accept thenables that use a function as `obj` and that have no + // `catch` handler. + return isPromise(obj) || + (obj !== null && typeof obj === 'object' && + typeof obj.then === 'function' && + typeof obj.catch === 'function'); +} + +async function waitForActual(promiseFn) { + let resultPromise; + if (typeof promiseFn === 'function') { + // Return a rejected promise if `promiseFn` throws synchronously. + resultPromise = promiseFn(); + // Fail in case no promise is returned. 
+ if (!checkIsPromise(resultPromise)) { + throw new ERR_INVALID_RETURN_VALUE('instance of Promise', + 'promiseFn', resultPromise); + } + } else if (checkIsPromise(promiseFn)) { + resultPromise = promiseFn; + } else { + throw new ERR_INVALID_ARG_TYPE( + 'promiseFn', ['Function', 'Promise'], promiseFn); + } + + try { + await resultPromise; + } catch (e) { + return e; + } + return NO_EXCEPTION_SENTINEL; +} + +function expectsError(stackStartFn, actual, error, message) { + if (typeof error === 'string') { + if (arguments.length === 4) { + throw new ERR_INVALID_ARG_TYPE('error', + ['Object', 'Error', 'Function', 'RegExp'], + error); + } + if (typeof actual === 'object' && actual !== null) { + if (actual.message === error) { + throw new ERR_AMBIGUOUS_ARGUMENT( + 'error/message', + `The error message "${actual.message}" is identical to the message.`, + ); + } + } else if (actual === error) { + throw new ERR_AMBIGUOUS_ARGUMENT( + 'error/message', + `The error "${actual}" is identical to the message.`, + ); + } + message = error; + error = undefined; + } else if (error != null && + typeof error !== 'object' && + typeof error !== 'function') { + throw new ERR_INVALID_ARG_TYPE('error', + ['Object', 'Error', 'Function', 'RegExp'], + error); + } + + if (actual === NO_EXCEPTION_SENTINEL) { + let details = ''; + if (error?.name) { + details += ` (${error.name})`; + } + details += message ? `: ${message}` : '.'; + const fnType = stackStartFn === assert.rejects ? 
'rejection' : 'exception'; + innerFail({ + actual: undefined, + expected: error, + operator: stackStartFn.name, + message: `Missing expected ${fnType}${details}`, + stackStartFn, + }); + } + + if (!error) + return; + + expectedException(actual, error, message, stackStartFn); +} + +function hasMatchingError(actual, expected) { + if (typeof expected !== 'function') { + if (isRegExp(expected)) { + const str = String(actual); + return RegExpPrototypeExec(expected, str) !== null; + } + throw new ERR_INVALID_ARG_TYPE( + 'expected', ['Function', 'RegExp'], expected, + ); + } + // Guard instanceof against arrow functions as they don't have a prototype. + if (expected.prototype !== undefined && actual instanceof expected) { + return true; + } + if (ObjectPrototypeIsPrototypeOf(Error, expected)) { + return false; + } + return ReflectApply(expected, {}, [actual]) === true; +} + +function expectsNoError(stackStartFn, actual, error, message) { + if (actual === NO_EXCEPTION_SENTINEL) + return; + + if (typeof error === 'string') { + message = error; + error = undefined; + } + + if (!error || hasMatchingError(actual, error)) { + const details = message ? `: ${message}` : '.'; + const fnType = stackStartFn === assert.doesNotReject ? + 'rejection' : 'exception'; + innerFail({ + actual, + expected: error, + operator: stackStartFn.name, + message: `Got unwanted ${fnType}${details}\n` + + `Actual message: "${actual?.message}"`, + stackStartFn, + }); + } + throw actual; +} + +/** + * Expects the function `promiseFn` to throw an error. + * @param {() => any} promiseFn + * @param {...any} [args] + * @returns {void} + */ +assert.throws = function throws(promiseFn, ...args) { + expectsError(throws, getActual(promiseFn), ...args); +}; + +/** + * Expects `promiseFn` function or its value to reject. 
+ * @param {() => Promise} promiseFn + * @param {...any} [args] + * @returns {Promise} + */ +assert.rejects = async function rejects(promiseFn, ...args) { + expectsError(rejects, await waitForActual(promiseFn), ...args); +}; + +/** + * Asserts that the function `fn` does not throw an error. + * @param {() => any} fn + * @param {...any} [args] + * @returns {void} + */ +assert.doesNotThrow = function doesNotThrow(fn, ...args) { + expectsNoError(doesNotThrow, getActual(fn), ...args); +}; + +/** + * Expects `fn` or its value to not reject. + * @param {() => Promise} fn + * @param {...any} [args] + * @returns {Promise} + */ +assert.doesNotReject = async function doesNotReject(fn, ...args) { + expectsNoError(doesNotReject, await waitForActual(fn), ...args); +}; + +/** + * Throws `AssertionError` if the value is not `null` or `undefined`. + * @param {any} err + * @returns {void} + */ +assert.ifError = function ifError(err) { + if (err !== null && err !== undefined) { + let message = 'ifError got unwanted exception: '; + if (typeof err === 'object' && typeof err.message === 'string') { + if (err.message.length === 0 && err.constructor) { + message += err.constructor.name; + } else { + message += err.message; + } + } else { + message += inspect(err); + } + + const newErr = new AssertionError({ + actual: err, + expected: null, + operator: 'ifError', + message, + stackStartFn: ifError, + }); + + // Make sure we actually have a stack trace! + const origStack = err.stack; + + if (typeof origStack === 'string') { + // This will remove any duplicated frames from the error frames taken + // from within `ifError` and add the original error frames to the newly + // created ones. + const origStackStart = StringPrototypeIndexOf(origStack, '\n at'); + if (origStackStart !== -1) { + const originalFrames = StringPrototypeSplit( + StringPrototypeSlice(origStack, origStackStart + 1), + '\n', + ); + // Filter all frames existing in err.stack. 
+ let newFrames = StringPrototypeSplit(newErr.stack, '\n'); + for (const errFrame of originalFrames) { + // Find the first occurrence of the frame. + const pos = ArrayPrototypeIndexOf(newFrames, errFrame); + if (pos !== -1) { + // Only keep new frames. + newFrames = ArrayPrototypeSlice(newFrames, 0, pos); + break; + } + } + const stackStart = ArrayPrototypeJoin(newFrames, '\n'); + const stackEnd = ArrayPrototypeJoin(originalFrames, '\n'); + newErr.stack = `${stackStart}\n${stackEnd}`; + } + } + + throw newErr; + } +}; + +function internalMatch(string, regexp, message, fn) { + if (!isRegExp(regexp)) { + throw new ERR_INVALID_ARG_TYPE( + 'regexp', 'RegExp', regexp, + ); + } + const match = fn === assert.match; + if (typeof string !== 'string' || + RegExpPrototypeExec(regexp, string) !== null !== match) { + if (message instanceof Error) { + throw message; + } + + const generatedMessage = !message; + + // 'The input was expected to not match the regular expression ' + + message ||= (typeof string !== 'string' ? + 'The "string" argument must be of type string. Received type ' + + `${typeof string} (${inspect(string)})` : + (match ? + 'The input did not match the regular expression ' : + 'The input was expected to not match the regular expression ') + + `${inspect(regexp)}. Input:\n\n${inspect(string)}\n`); + const err = new AssertionError({ + actual: string, + expected: regexp, + message, + operator: fn.name, + stackStartFn: fn, + }); + err.generatedMessage = generatedMessage; + throw err; + } +} + +/** + * Expects the `string` input to match the regular expression. + * @param {string} string + * @param {RegExp} regexp + * @param {string | Error} [message] + * @returns {void} + */ +assert.match = function match(string, regexp, message) { + internalMatch(string, regexp, message, match); +}; + +/** + * Expects the `string` input not to match the regular expression. 
+ * @param {string} string + * @param {RegExp} regexp + * @param {string | Error} [message] + * @returns {void} + */ +assert.doesNotMatch = function doesNotMatch(string, regexp, message) { + internalMatch(string, regexp, message, doesNotMatch); +}; + +assert.CallTracker = deprecate(CallTracker, 'assert.CallTracker is deprecated.', 'DEP0173'); + +/** + * Expose a strict only variant of assert. + * @param {...any} args + * @returns {void} + */ +function strict(...args) { + innerOk(strict, args.length, ...args); +} + +assert.strict = ObjectAssign(strict, assert, { + equal: assert.strictEqual, + deepEqual: assert.deepStrictEqual, + notEqual: assert.notStrictEqual, + notDeepEqual: assert.notDeepStrictEqual, +}); + +assert.strict.strict = assert.strict; \ No newline at end of file diff --git a/node/async_hooks.js b/node/async_hooks.js new file mode 100644 index 00000000..8c57bc67 --- /dev/null +++ b/node/async_hooks.js @@ -0,0 +1,296 @@ +'use strict'; + +const { + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + FunctionPrototypeBind, + NumberIsSafeInteger, + ObjectDefineProperties, + ObjectFreeze, + ReflectApply, + Symbol, +} = primordials; + +const { + ERR_ASYNC_CALLBACK, + ERR_ASYNC_TYPE, + ERR_INVALID_ASYNC_ID, +} = require('internal/errors').codes; +const { + deprecate, + kEmptyObject, +} = require('internal/util'); +const { + validateFunction, + validateString, +} = require('internal/validators'); +const internal_async_hooks = require('internal/async_hooks'); + +const AsyncContextFrame = require('internal/async_context_frame'); + +// Get functions +// For userland AsyncResources, make sure to emit a destroy event when the +// resource gets gced. 
const { registerDestroyHook, kNoPromiseHook } = internal_async_hooks;
const {
  asyncWrap,
  executionAsyncId,
  triggerAsyncId,
  // Private API
  hasAsyncIdStack,
  getHookArrays,
  enableHooks,
  disableHooks,
  updatePromiseHookMode,
  executionAsyncResource,
  // Internal Embedder API
  newAsyncId,
  getDefaultTriggerAsyncId,
  emitInit,
  emitBefore,
  emitAfter,
  emitDestroy,
  enabledHooksExist,
  initHooksExist,
  destroyHooksExist,
} = internal_async_hooks;

// Get symbols used to stash each lifecycle callback on the AsyncHook
// instance itself.
const {
  async_id_symbol, trigger_async_id_symbol,
  init_symbol, before_symbol, after_symbol, destroy_symbol,
  promise_resolve_symbol,
} = internal_async_hooks.symbols;

// Get constants: indices into the shared hook_fields counter array.
const {
  kInit, kBefore, kAfter, kDestroy, kTotals, kPromiseResolve,
} = internal_async_hooks.constants;

// Listener API //

// A set of lifecycle callbacks (init/before/after/destroy/promiseResolve)
// that can be enabled/disabled as a unit. Validation happens here, in the
// constructor, so enable() can assume all stored callbacks are functions.
class AsyncHook {
  constructor({ init, before, after, destroy, promiseResolve }) {
    if (init !== undefined && typeof init !== 'function')
      throw new ERR_ASYNC_CALLBACK('hook.init');
    if (before !== undefined && typeof before !== 'function')
      throw new ERR_ASYNC_CALLBACK('hook.before');
    if (after !== undefined && typeof after !== 'function')
      throw new ERR_ASYNC_CALLBACK('hook.after');
    if (destroy !== undefined && typeof destroy !== 'function')
      throw new ERR_ASYNC_CALLBACK('hook.destroy');
    if (promiseResolve !== undefined && typeof promiseResolve !== 'function')
      throw new ERR_ASYNC_CALLBACK('hook.promiseResolve');

    this[init_symbol] = init;
    this[before_symbol] = before;
    this[after_symbol] = after;
    this[destroy_symbol] = destroy;
    this[promise_resolve_symbol] = promiseResolve;
    this[kNoPromiseHook] = false;
  }

  enable() {
    // The set of callbacks for a hook should be the same regardless of whether
    // enable()/disable() are run during their execution. The following
    // references are reassigned to the tmp arrays if a hook is currently being
    // processed.
    const { 0: hooks_array, 1: hook_fields } = getHookArrays();

    // Each hook is only allowed to be added once.
    if (ArrayPrototypeIncludes(hooks_array, this))
      return this;

    const prev_kTotals = hook_fields[kTotals];

    // createHook() has already enforced that the callbacks are all functions,
    // so here simply increment the count of whether each callbacks exists or
    // not. Note the chained `+=`: each per-kind counter is bumped by 0 or 1
    // (`+!!cb`) and the new per-kind value is folded into the running
    // kTotals, so kTotals ends up as the sum of all per-kind counters.
    hook_fields[kTotals] = hook_fields[kInit] += +!!this[init_symbol];
    hook_fields[kTotals] += hook_fields[kBefore] += +!!this[before_symbol];
    hook_fields[kTotals] += hook_fields[kAfter] += +!!this[after_symbol];
    hook_fields[kTotals] += hook_fields[kDestroy] += +!!this[destroy_symbol];
    hook_fields[kTotals] +=
      hook_fields[kPromiseResolve] += +!!this[promise_resolve_symbol];
    ArrayPrototypePush(hooks_array, this);

    // First hook with any callbacks transitions the native side on.
    if (prev_kTotals === 0 && hook_fields[kTotals] > 0) {
      enableHooks();
    }

    if (!this[kNoPromiseHook]) {
      updatePromiseHookMode();
    }

    return this;
  }

  disable() {
    const { 0: hooks_array, 1: hook_fields } = getHookArrays();

    const index = ArrayPrototypeIndexOf(hooks_array, this);
    if (index === -1)
      return this;

    const prev_kTotals = hook_fields[kTotals];

    // Mirror of enable(): decrement each per-kind counter by 0 or 1 and
    // recompute kTotals as their sum.
    hook_fields[kTotals] = hook_fields[kInit] -= +!!this[init_symbol];
    hook_fields[kTotals] += hook_fields[kBefore] -= +!!this[before_symbol];
    hook_fields[kTotals] += hook_fields[kAfter] -= +!!this[after_symbol];
    hook_fields[kTotals] += hook_fields[kDestroy] -= +!!this[destroy_symbol];
    hook_fields[kTotals] +=
      hook_fields[kPromiseResolve] -= +!!this[promise_resolve_symbol];
    ArrayPrototypeSplice(hooks_array, index, 1);

    // Last active hook gone: transition the native side off.
    if (prev_kTotals > 0 && hook_fields[kTotals] === 0) {
      disableHooks();
    }

    return this;
  }
}


/**
 * Creates a new AsyncHook from a set of lifecycle callbacks.
 * @param {object} fns - { init, before, after, destroy, promiseResolve }
 * @returns {AsyncHook}
 */
function createHook(fns) {
  return new AsyncHook(fns);
}


// Embedder API //

const destroyedSymbol = Symbol('destroyed');
const contextFrameSymbol = Symbol('context_frame');

class AsyncResource {
  constructor(type, opts = kEmptyObject) {
validateString(type, 'type'); + + let triggerAsyncId = opts; + let requireManualDestroy = false; + if (typeof opts !== 'number') { + triggerAsyncId = opts.triggerAsyncId === undefined ? + getDefaultTriggerAsyncId() : opts.triggerAsyncId; + requireManualDestroy = !!opts.requireManualDestroy; + } + + // Unlike emitInitScript, AsyncResource doesn't supports null as the + // triggerAsyncId. + if (!NumberIsSafeInteger(triggerAsyncId) || triggerAsyncId < -1) { + throw new ERR_INVALID_ASYNC_ID('triggerAsyncId', triggerAsyncId); + } + + this[contextFrameSymbol] = AsyncContextFrame.current(); + + const asyncId = newAsyncId(); + this[async_id_symbol] = asyncId; + this[trigger_async_id_symbol] = triggerAsyncId; + + if (initHooksExist()) { + if (enabledHooksExist() && type.length === 0) { + throw new ERR_ASYNC_TYPE(type); + } + + emitInit(asyncId, type, triggerAsyncId, this); + } + + if (!requireManualDestroy && destroyHooksExist()) { + // This prop name (destroyed) has to be synchronized with C++ + const destroyed = { destroyed: false }; + this[destroyedSymbol] = destroyed; + registerDestroyHook(this, asyncId, destroyed); + } + } + + runInAsyncScope(fn, thisArg, ...args) { + const asyncId = this[async_id_symbol]; + emitBefore(asyncId, this[trigger_async_id_symbol], this); + + const contextFrame = this[contextFrameSymbol]; + const prior = AsyncContextFrame.exchange(contextFrame); + try { + return ReflectApply(fn, thisArg, args); + } finally { + AsyncContextFrame.set(prior); + if (hasAsyncIdStack()) + emitAfter(asyncId); + } + } + + emitDestroy() { + if (this[destroyedSymbol] !== undefined) { + this[destroyedSymbol].destroyed = true; + } + emitDestroy(this[async_id_symbol]); + return this; + } + + asyncId() { + return this[async_id_symbol]; + } + + triggerAsyncId() { + return this[trigger_async_id_symbol]; + } + + bind(fn, thisArg) { + validateFunction(fn, 'fn'); + let bound; + if (thisArg === undefined) { + const resource = this; + bound = function(...args) { + 
ArrayPrototypeUnshift(args, fn, this); + return ReflectApply(resource.runInAsyncScope, resource, args); + }; + } else { + bound = FunctionPrototypeBind(this.runInAsyncScope, this, fn, thisArg); + } + let self = this; + ObjectDefineProperties(bound, { + 'length': { + __proto__: null, + configurable: true, + enumerable: false, + value: fn.length, + writable: false, + }, + 'asyncResource': { + __proto__: null, + configurable: true, + enumerable: true, + get: deprecate(function() { + return self; + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + set: deprecate(function(val) { + self = val; + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + }, + }); + return bound; + } + + static bind(fn, type, thisArg) { + type ||= fn.name; + return (new AsyncResource(type || 'bound-anonymous-fn')).bind(fn, thisArg); + } +} + +// Placing all exports down here because the exported classes won't export +// otherwise. +module.exports = { + // Public API + get AsyncLocalStorage() { + return AsyncContextFrame.enabled ? 
+ require('internal/async_local_storage/async_context_frame') : + require('internal/async_local_storage/async_hooks'); + }, + createHook, + executionAsyncId, + triggerAsyncId, + executionAsyncResource, + asyncWrapProviders: ObjectFreeze({ __proto__: null, ...asyncWrap.Providers }), + // Embedder API + AsyncResource, +}; \ No newline at end of file diff --git a/node/buffer.js b/node/buffer.js new file mode 100644 index 00000000..06bfebc7 --- /dev/null +++ b/node/buffer.js @@ -0,0 +1,1365 @@ +'use strict'; + +const { + Array, + ArrayBufferIsView, + ArrayIsArray, + ArrayPrototypeForEach, + MathFloor, + MathMin, + MathTrunc, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + ObjectSetPrototypeOf, + RegExpPrototypeSymbolReplace, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + StringPrototypeToLowerCase, + StringPrototypeTrim, + SymbolSpecies, + SymbolToPrimitive, + TypedArrayPrototypeFill, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeSet, + TypedArrayPrototypeSlice, + Uint8Array, + Uint8ArrayPrototype, +} = primordials; + +const { + byteLengthUtf8, + compare: _compare, + compareOffset, + copy: _copy, + fill: bindingFill, + isAscii: bindingIsAscii, + isUtf8: bindingIsUtf8, + indexOfBuffer, + indexOfNumber, + indexOfString, + swap16: _swap16, + swap32: _swap32, + swap64: _swap64, + kMaxLength, + kStringMaxLength, + atob: _atob, + btoa: _btoa, +} = internalBinding('buffer'); +const { + constants: { + ALL_PROPERTIES, + ONLY_ENUMERABLE, + }, + getOwnNonIndexProperties, + isInsideNodeModules, +} = internalBinding('util'); +const { + customInspectSymbol, + lazyDOMException, + normalizeEncoding, + kIsEncodingSymbol, + defineLazyProperties, + encodingsMap, + deprecate, +} = require('internal/util'); +const { + isAnyArrayBuffer, + 
isArrayBufferView, + isUint8Array, + isTypedArray, +} = require('internal/util/types'); +const { + inspect: utilInspect, +} = require('internal/util/inspect'); + +const { + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_BUFFER_SIZE, + ERR_MISSING_ARGS, + ERR_OUT_OF_RANGE, + ERR_UNKNOWN_ENCODING, + }, + genericNodeError, +} = require('internal/errors'); +const { + validateArray, + validateBuffer, + validateInteger, + validateNumber, + validateString, +} = require('internal/validators'); +// Provide validateInteger() but with kMaxLength as the default maximum value. +const validateOffset = (value, name, min = 0, max = kMaxLength) => + validateInteger(value, name, min, max); + +const { + FastBuffer, + markAsUntransferable, + addBufferPrototypeMethods, + createUnsafeBuffer, +} = require('internal/buffer'); + +FastBuffer.prototype.constructor = Buffer; +Buffer.prototype = FastBuffer.prototype; +addBufferPrototypeMethods(Buffer.prototype); + +const constants = ObjectDefineProperties({}, { + MAX_LENGTH: { + __proto__: null, + value: kMaxLength, + writable: false, + enumerable: true, + }, + MAX_STRING_LENGTH: { + __proto__: null, + value: kStringMaxLength, + writable: false, + enumerable: true, + }, +}); + +Buffer.poolSize = 8 * 1024; +let poolSize, poolOffset, allocPool, allocBuffer; + +function createPool() { + poolSize = Buffer.poolSize; + allocBuffer = createUnsafeBuffer(poolSize); + allocPool = allocBuffer.buffer; + markAsUntransferable(allocPool); + poolOffset = 0; +} +createPool(); + +function alignPool() { + // Ensure aligned slices + if (poolOffset & 0x7) { + poolOffset |= 0x7; + poolOffset++; + } +} + +let bufferWarningAlreadyEmitted = false; +let nodeModulesCheckCounter = 0; +const bufferWarning = 'Buffer() is deprecated due to security and usability ' + + 'issues. 
Please use the Buffer.alloc(), ' + + 'Buffer.allocUnsafe(), or Buffer.from() methods instead.'; + +function showFlaggedDeprecation() { + if (bufferWarningAlreadyEmitted || + ++nodeModulesCheckCounter > 10000 || + (!require('internal/options').getOptionValue('--pending-deprecation') && + isInsideNodeModules(100, true))) { + // We don't emit a warning, because we either: + // - Already did so, or + // - Already checked too many times whether a call is coming + // from node_modules and want to stop slowing down things, or + // - We aren't running with `--pending-deprecation` enabled, + // and the code is inside `node_modules`. + // - We found node_modules in up to the topmost 100 frames, or + // there are more than 100 frames and we don't want to search anymore. + return; + } + + process.emitWarning(bufferWarning, 'DeprecationWarning', 'DEP0005'); + bufferWarningAlreadyEmitted = true; +} + +function toInteger(n, defaultVal) { + n = +n; + if (!NumberIsNaN(n) && + n >= NumberMIN_SAFE_INTEGER && + n <= NumberMAX_SAFE_INTEGER) { + return ((n % 1) === 0 ? n : MathFloor(n)); + } + return defaultVal; +} + +function copyImpl(source, target, targetStart, sourceStart, sourceEnd) { + if (!ArrayBufferIsView(source)) + throw new ERR_INVALID_ARG_TYPE('source', ['Buffer', 'Uint8Array'], source); + if (!ArrayBufferIsView(target)) + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + + if (targetStart === undefined) { + targetStart = 0; + } else { + targetStart = NumberIsInteger(targetStart) ? targetStart : toInteger(targetStart, 0); + if (targetStart < 0) + throw new ERR_OUT_OF_RANGE('targetStart', '>= 0', targetStart); + } + + if (sourceStart === undefined) { + sourceStart = 0; + } else { + sourceStart = NumberIsInteger(sourceStart) ? 
sourceStart : toInteger(sourceStart, 0); + if (sourceStart < 0 || sourceStart > source.byteLength) + throw new ERR_OUT_OF_RANGE('sourceStart', `>= 0 && <= ${source.byteLength}`, sourceStart); + } + + if (sourceEnd === undefined) { + sourceEnd = source.byteLength; + } else { + sourceEnd = NumberIsInteger(sourceEnd) ? sourceEnd : toInteger(sourceEnd, 0); + if (sourceEnd < 0) + throw new ERR_OUT_OF_RANGE('sourceEnd', '>= 0', sourceEnd); + } + + if (targetStart >= target.byteLength || sourceStart >= sourceEnd) + return 0; + + return _copyActual(source, target, targetStart, sourceStart, sourceEnd); +} + +function _copyActual(source, target, targetStart, sourceStart, sourceEnd) { + if (sourceEnd - sourceStart > target.byteLength - targetStart) + sourceEnd = sourceStart + target.byteLength - targetStart; + + let nb = sourceEnd - sourceStart; + const sourceLen = source.byteLength - sourceStart; + if (nb > sourceLen) + nb = sourceLen; + + if (nb <= 0) + return 0; + + _copy(source, target, targetStart, sourceStart, nb); + + return nb; +} + +/** + * The Buffer() constructor is deprecated in documentation and should not be + * used moving forward. Rather, developers should use one of the three new + * factory APIs: Buffer.from(), Buffer.allocUnsafe() or Buffer.alloc() based on + * their specific needs. There is no runtime deprecation because of the extent + * to which the Buffer constructor is used in the ecosystem currently -- a + * runtime deprecation would introduce too much breakage at this time. It's not + * likely that the Buffer constructors would ever actually be removed. + * Deprecation Code: DEP0005 + * @returns {Buffer} + */ +function Buffer(arg, encodingOrOffset, length) { + showFlaggedDeprecation(); + // Common case. 
+ if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new ERR_INVALID_ARG_TYPE('string', 'string', arg); + } + return Buffer.alloc(arg); + } + return Buffer.from(arg, encodingOrOffset, length); +} + +ObjectDefineProperty(Buffer, SymbolSpecies, { + __proto__: null, + enumerable: false, + configurable: true, + get() { return FastBuffer; }, +}); + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + * @param {any} value + * @param {BufferEncoding|number} encodingOrOffset + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.from = function from(value, encodingOrOffset, length) { + if (typeof value === 'string') + return fromString(value, encodingOrOffset); + + if (typeof value === 'object' && value !== null) { + if (isAnyArrayBuffer(value)) + return fromArrayBuffer(value, encodingOrOffset, length); + + const valueOf = value.valueOf && value.valueOf(); + if (valueOf != null && + valueOf !== value && + (typeof valueOf === 'string' || typeof valueOf === 'object')) { + return from(valueOf, encodingOrOffset, length); + } + + const b = fromObject(value); + if (b) + return b; + + if (typeof value[SymbolToPrimitive] === 'function') { + const primitive = value[SymbolToPrimitive]('string'); + if (typeof primitive === 'string') { + return fromString(primitive, encodingOrOffset); + } + } + } + + throw new ERR_INVALID_ARG_TYPE( + 'first argument', + ['string', 'Buffer', 'ArrayBuffer', 'Array', 'Array-like Object'], + value, + ); +}; + +/** + * Creates the Buffer as a copy of the underlying ArrayBuffer of the view + * rather than the contents of the view. 
 * @param {TypedArray} view
 * @param {number} [offset]
 * @param {number} [length]
 * @returns {Buffer}
 */
Buffer.copyBytesFrom = function copyBytesFrom(view, offset, length) {
  if (!isTypedArray(view)) {
    throw new ERR_INVALID_ARG_TYPE('view', [ 'TypedArray' ], view);
  }

  const viewLength = TypedArrayPrototypeGetLength(view);
  if (viewLength === 0) {
    return Buffer.alloc(0);
  }

  if (offset !== undefined || length !== undefined) {
    if (offset !== undefined) {
      validateInteger(offset, 'offset', 0);
      // Offset past the end of the view yields an empty Buffer.
      if (offset >= viewLength) return Buffer.alloc(0);
    } else {
      offset = 0;
    }
    let end;
    if (length !== undefined) {
      validateInteger(length, 'length', 0);
      end = offset + length;
    } else {
      end = viewLength;
    }

    // TypedArray.prototype.slice copies, so the result is detached from
    // the caller's view.
    view = TypedArrayPrototypeSlice(view, offset, end);
  }

  return fromArrayLike(new Uint8Array(
    TypedArrayPrototypeGetBuffer(view),
    TypedArrayPrototypeGetByteOffset(view),
    TypedArrayPrototypeGetByteLength(view)));
};

// Identical to the built-in %TypedArray%.of(), but avoids using the deprecated
// Buffer() constructor. Must use arrow function syntax to avoid automatically
// adding a `prototype` property and making the function a constructor.
//
// Refs: https://tc39.github.io/ecma262/#sec-%typedarray%.of
// Refs: https://esdiscuss.org/topic/isconstructor#content-11
const of = (...items) => {
  const newObj = createUnsafeBuffer(items.length);
  for (let k = 0; k < items.length; k++)
    newObj[k] = items[k];
  return newObj;
};
Buffer.of = of;

ObjectSetPrototypeOf(Buffer, Uint8Array);

/**
 * Creates a new filled Buffer instance.
 * alloc(size[, fill[, encoding]])
 * @returns {FastBuffer}
 */
Buffer.alloc = function alloc(size, fill, encoding) {
  validateNumber(size, 'size', 0, kMaxLength);
  // Only take the fill path when it would change the contents: a FastBuffer
  // is already zero-filled, so fill === 0 (and empty buffers) can skip it.
  if (fill !== undefined && fill !== 0 && size > 0) {
    const buf = createUnsafeBuffer(size);
    return _fill(buf, fill, 0, buf.length, encoding);
  }
  return new FastBuffer(size);
};

/**
 * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer
 * instance. If `--zero-fill-buffers` is set, will zero-fill the buffer.
 * @returns {FastBuffer}
 */
Buffer.allocUnsafe = function allocUnsafe(size) {
  validateNumber(size, 'size', 0, kMaxLength);
  // May be served from the shared pre-allocated pool (see allocate()).
  return allocate(size);
};

/**
 * Equivalent to SlowBuffer(num), by default creates a non-zero-filled
 * Buffer instance that is not allocated off the pre-initialized pool.
 * If `--zero-fill-buffers` is set, will zero-fill the buffer.
 * @param {number} size
 * @returns {FastBuffer|undefined}
 */
Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) {
  validateNumber(size, 'size', 0, kMaxLength);
  return createUnsafeBuffer(size);
};

// If --zero-fill-buffers command line argument is set, a zero-filled
// buffer is returned.
// Legacy SlowBuffer constructor: always allocates off-pool (never from the
// shared pool), contents uninitialized unless --zero-fill-buffers is set.
function SlowBuffer(size) {
  validateNumber(size, 'size', 0, kMaxLength);
  return createUnsafeBuffer(size);
}

ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype);
ObjectSetPrototypeOf(SlowBuffer, Uint8Array);

// Allocation strategy for allocUnsafe(): small requests (under half the pool
// size) are carved out of the shared pool; large requests get their own
// backing store. Mutates the module-level pool state (poolOffset).
function allocate(size) {
  if (size <= 0) {
    return new FastBuffer();
  }
  if (size < (Buffer.poolSize >>> 1)) {
    if (size > (poolSize - poolOffset))
      createPool();
    const b = new FastBuffer(allocPool, poolOffset, size);
    poolOffset += size;
    alignPool();
    return b;
  }
  return createUnsafeBuffer(size);
}

// Fast path for fromString(): try to write into the shared pool, computing
// the exact byte length lazily (string.length is a lower bound; *4 is the
// UTF-8 worst case upper bound) to avoid byteLength() calls when possible.
function fromStringFast(string, ops) {
  const maxLength = Buffer.poolSize >>> 1;

  let length = string.length; // Min length

  if (length >= maxLength)
    return createFromString(string, ops);

  length *= 4; // Max length (4 bytes per character)

  if (length >= maxLength)
    length = ops.byteLength(string); // Actual length

  if (length >= maxLength)
    return createFromString(string, ops, length);

  if (length > (poolSize - poolOffset))
    createPool();

  // `actual` may be less than `length` since `length` was an upper bound.
  const actual = ops.write(allocBuffer, string, poolOffset, length);
  const b = new FastBuffer(allocPool, poolOffset, actual);

  poolOffset += actual;
  alignPool();
  return b;
}

// Off-pool string decode; re-slices if fewer bytes were written than
// the computed length (e.g. truncated multi-byte sequences).
function createFromString(string, ops, length = ops.byteLength(string)) {
  const buf = Buffer.allocUnsafeSlow(length);
  const actual = ops.write(buf, string, 0, length);
  return actual < length ? new FastBuffer(buf.buffer, 0, actual) : buf;
}

function fromString(string, encoding) {
  let ops;
  // NOTE(review): a truthy non-string `encoding` silently falls back to
  // utf8 here rather than throwing — appears intentional; confirm against
  // Buffer.from() documented behavior before changing.
  if (!encoding || encoding === 'utf8' || typeof encoding !== 'string') {
    ops = encodingOps.utf8;
  } else {
    ops = getEncodingOps(encoding);
    if (ops === undefined)
      throw new ERR_UNKNOWN_ENCODING(encoding);
  }

  return string.length === 0 ?
    new FastBuffer() : fromStringFast(string, ops);
}

// Creates a Buffer view over an existing ArrayBuffer without copying.
function fromArrayBuffer(obj, byteOffset, length) {
  // Convert byteOffset to integer
  if (byteOffset === undefined) {
    byteOffset = 0;
  } else {
    byteOffset = +byteOffset;
    if (NumberIsNaN(byteOffset))
      byteOffset = 0;
  }

  const maxLength = obj.byteLength - byteOffset;

  if (maxLength < 0)
    throw new ERR_BUFFER_OUT_OF_BOUNDS('offset');

  if (length !== undefined) {
    // Convert length to non-negative integer.
    length = +length;
    if (length > 0) {
      if (length > maxLength)
        throw new ERR_BUFFER_OUT_OF_BOUNDS('length');
    } else {
      // NaN and negative values clamp to 0.
      length = 0;
    }
  }

  return new FastBuffer(obj, byteOffset, length);
}

// Copies an array-like's elements into a new Buffer (pooled when small).
function fromArrayLike(obj) {
  if (obj.length <= 0)
    return new FastBuffer();
  if (obj.length < (Buffer.poolSize >>> 1)) {
    if (obj.length > (poolSize - poolOffset))
      createPool();
    const b = new FastBuffer(allocPool, poolOffset, obj.length);
    TypedArrayPrototypeSet(b, obj, 0);
    poolOffset += obj.length;
    alignPool();
    return b;
  }
  return new FastBuffer(obj);
}

// Handles Buffer.from(object): array-likes, TypedArray-likes, and the
// JSON round-trip shape { type: 'Buffer', data: [...] }. Returns
// undefined when the object is not convertible (caller keeps probing).
function fromObject(obj) {
  if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) {
    if (typeof obj.length !== 'number') {
      return new FastBuffer();
    }
    return fromArrayLike(obj);
  }

  if (obj.type === 'Buffer' && ArrayIsArray(obj.data)) {
    return fromArrayLike(obj.data);
  }
}

// Static methods

Buffer.isBuffer = function isBuffer(b) {
  return b instanceof Buffer;
};

/**
 * Lexicographically compares two byte sequences.
 * @returns {number} 0, -1, or 1
 */
Buffer.compare = function compare(buf1, buf2) {
  if (!isUint8Array(buf1)) {
    throw new ERR_INVALID_ARG_TYPE('buf1', ['Buffer', 'Uint8Array'], buf1);
  }

  if (!isUint8Array(buf2)) {
    throw new ERR_INVALID_ARG_TYPE('buf2', ['Buffer', 'Uint8Array'], buf2);
  }

  // Identity short-circuit avoids the native comparison.
  if (buf1 === buf2) {
    return 0;
  }

  return _compare(buf1, buf2);
};

Buffer.isEncoding = function isEncoding(encoding) {
  return typeof encoding === 'string' && encoding.length !== 0 &&
         normalizeEncoding(encoding) !== undefined;
};
Buffer[kIsEncodingSymbol] = Buffer.isEncoding;

/**
 * Concatenates a list of Buffer/Uint8Array instances into one Buffer.
 * @param {Array} list
 * @param {number} [length] - total length; computed from the list if omitted
 * @returns {Buffer}
 */
Buffer.concat = function concat(list, length) {
  validateArray(list, 'list');

  if (list.length === 0)
    return new FastBuffer();

  if (length === undefined) {
    length = 0;
    for (let i = 0; i < list.length; i++) {
      if (list[i].length) {
        length += list[i].length;
      }
    }
  } else {
    validateOffset(length, 'length');
  }

  const buffer = Buffer.allocUnsafe(length);
  let pos = 0;
  for (let i = 0; i < list.length; i++) {
    const buf = list[i];
    if (!isUint8Array(buf)) {
      // TODO(BridgeAR): This should not be of type ERR_INVALID_ARG_TYPE.
      // Instead, find the proper error code for this.
      throw new ERR_INVALID_ARG_TYPE(
        `list[${i}]`, ['Buffer', 'Uint8Array'], list[i]);
    }
    pos += _copyActual(buf, buffer, pos, 0, buf.length);
  }

  // Note: `length` is always equal to `buffer.length` at this point
  if (pos < length) {
    // Zero-fill the remaining bytes if the specified `length` was more than
    // the actual total length, i.e. if we have some remaining allocated bytes
    // there were not initialized.
    TypedArrayPrototypeFill(buffer, 0, pos, length);
  }

  return buffer;
};

// Decoded byte length of a base64 string: 3 bytes per 4 chars, minus
// up to two '=' padding characters.
function base64ByteLength(str, bytes) {
  // Handle padding
  if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D)
    bytes--;
  if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D)
    bytes--;

  // Base64 ratio: 3/4
  return (bytes * 3) >>> 2;
}

// Per-encoding operation table: byteLength / write / slice / indexOf for
// each supported encoding, dispatching to the native buffer binding.
const encodingOps = {
  utf8: {
    encoding: 'utf8',
    encodingVal: encodingsMap.utf8,
    byteLength: byteLengthUtf8,
    write: (buf, string, offset, len) => buf.utf8Write(string, offset, len),
    slice: (buf, start, end) => buf.utf8Slice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfString(buf, val, byteOffset, encodingsMap.utf8, dir),
  },
  ucs2: {
    encoding: 'ucs2',
    encodingVal: encodingsMap.utf16le,
    byteLength: (string) => string.length * 2,
    write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
    slice: (buf, start, end) => buf.ucs2Slice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir),
  },
  utf16le: {
    encoding: 'utf16le',
    encodingVal: encodingsMap.utf16le,
    byteLength: (string) => string.length * 2,
    write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
    slice: (buf, start, end) => buf.ucs2Slice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir),
  },
  latin1: {
    encoding: 'latin1',
    encodingVal: encodingsMap.latin1,
    byteLength: (string) => string.length,
    write: (buf, string, offset, len) => buf.latin1Write(string, offset, len),
    slice: (buf, start, end) => buf.latin1Slice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfString(buf, val, byteOffset, encodingsMap.latin1, dir),
  },
  ascii: {
    encoding: 'ascii',
    encodingVal: encodingsMap.ascii,
    byteLength: (string) => string.length,
    write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len),
    slice: (buf, start, end) => buf.asciiSlice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfBuffer(buf,
                    fromStringFast(val, encodingOps.ascii),
                    byteOffset,
                    encodingsMap.ascii,
                    dir),
  },
  base64: {
    encoding: 'base64',
    encodingVal: encodingsMap.base64,
    byteLength: (string) => base64ByteLength(string, string.length),
    write: (buf, string, offset, len) => buf.base64Write(string, offset, len),
    slice: (buf, start, end) => buf.base64Slice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfBuffer(buf,
                    fromStringFast(val, encodingOps.base64),
                    byteOffset,
                    encodingsMap.base64,
                    dir),
  },
  base64url: {
    encoding: 'base64url',
    encodingVal: encodingsMap.base64url,
    byteLength: (string) => base64ByteLength(string, string.length),
    write: (buf, string, offset, len) =>
      buf.base64urlWrite(string, offset, len),
    slice: (buf, start, end) => buf.base64urlSlice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfBuffer(buf,
                    fromStringFast(val, encodingOps.base64url),
                    byteOffset,
                    encodingsMap.base64url,
                    dir),
  },
  hex: {
    encoding: 'hex',
    encodingVal: encodingsMap.hex,
    byteLength: (string) => string.length >>> 1,
    write: (buf, string, offset, len) => buf.hexWrite(string, offset, len),
    slice: (buf, start, end) => buf.hexSlice(start, end),
    indexOf: (buf, val, byteOffset, dir) =>
      indexOfBuffer(buf,
                    fromStringFast(val, encodingOps.hex),
                    byteOffset,
                    encodingsMap.hex,
                    dir),
  },
};
// Resolves an encoding name (any case, with/without dashes) to its ops
// table entry; returns undefined for unknown encodings. Switches on
// string length first, checking the exact spelling before paying for a
// lowercase conversion. Case order presumably reflects lookup frequency
// rather than numeric order.
function getEncodingOps(encoding) {
  encoding += '';
  switch (encoding.length) {
    case 4:
      if (encoding === 'utf8') return encodingOps.utf8;
      if (encoding === 'ucs2') return encodingOps.ucs2;
      encoding = StringPrototypeToLowerCase(encoding);
      if (encoding === 'utf8') return encodingOps.utf8;
      if (encoding === 'ucs2') return encodingOps.ucs2;
      break;
    case 5:
      if (encoding === 'utf-8') return encodingOps.utf8;
      if (encoding === 'ascii') return encodingOps.ascii;
      if (encoding === 'ucs-2') return encodingOps.ucs2;
      encoding = StringPrototypeToLowerCase(encoding);
      if (encoding === 'utf-8') return encodingOps.utf8;
      if (encoding === 'ascii') return encodingOps.ascii;
      if (encoding === 'ucs-2') return encodingOps.ucs2;
      break;
    case 7:
      if (encoding === 'utf16le' ||
          StringPrototypeToLowerCase(encoding) === 'utf16le')
        return encodingOps.utf16le;
      break;
    case 8:
      if (encoding === 'utf-16le' ||
          StringPrototypeToLowerCase(encoding) === 'utf-16le')
        return encodingOps.utf16le;
      break;
    case 6:
      if (encoding === 'latin1' || encoding === 'binary')
        return encodingOps.latin1;
      if (encoding === 'base64') return encodingOps.base64;
      encoding = StringPrototypeToLowerCase(encoding);
      if (encoding === 'latin1' || encoding === 'binary')
        return encodingOps.latin1;
      if (encoding === 'base64') return encodingOps.base64;
      break;
    case 3:
      if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex')
        return encodingOps.hex;
      break;
    case 9:
      if (encoding === 'base64url' ||
          StringPrototypeToLowerCase(encoding) === 'base64url')
        return encodingOps.base64url;
      break;
  }
}

/**
 * Byte length of a string in the given encoding (default utf8), or the
 * byteLength of a Buffer/ArrayBuffer(-view) argument.
 * @param {string|Buffer|ArrayBuffer} string
 * @param {string} [encoding]
 * @returns {number}
 */
function byteLength(string, encoding) {
  if (typeof string !== 'string') {
    if (isArrayBufferView(string) || isAnyArrayBuffer(string)) {
      return string.byteLength;
    }

    throw new ERR_INVALID_ARG_TYPE(
      'string', ['string', 'Buffer', 'ArrayBuffer'], string,
    );
  }

  const len = string.length;
  if (len === 0)
    return 0;

  if (!encoding || encoding === 'utf8') {
    return byteLengthUtf8(string);
  }

  if (encoding === 'ascii') {
    return len;
  }

  const ops = getEncodingOps(encoding);
  if (ops === undefined) {
    // TODO (ronag): Makes more sense to throw here.
    // throw new ERR_UNKNOWN_ENCODING(encoding);
    return byteLengthUtf8(string);
  }

  return ops.byteLength(string);
}

Buffer.byteLength = byteLength;

// For backwards compatibility.
// Legacy `parent` accessor: aliases the underlying ArrayBuffer.
ObjectDefineProperty(Buffer.prototype, 'parent', {
  __proto__: null,
  enumerable: true,
  get() {
    if (!(this instanceof Buffer))
      return undefined;
    return this.buffer;
  },
});
// Legacy `offset` accessor: aliases byteOffset into the backing store.
ObjectDefineProperty(Buffer.prototype, 'offset', {
  __proto__: null,
  enumerable: true,
  get() {
    if (!(this instanceof Buffer))
      return undefined;
    return this.byteOffset;
  },
});

Buffer.prototype.copy =
  function copy(target, targetStart, sourceStart, sourceEnd) {
    return copyImpl(this, target, targetStart, sourceStart, sourceEnd);
  };

// No need to verify that "buf.length <= MAX_UINT32" since it's a read-only
// property of a typed array.
// This behaves neither like String nor Uint8Array in that we set start/end
// to their upper/lower bounds if the value passed is out of range.
Buffer.prototype.toString = function toString(encoding, start, end) {
  if (arguments.length === 0) {
    return this.utf8Slice(0, this.length);
  }

  const len = this.length;

  // Deliberate coercion: an undefined/NaN `start` fails both comparisons
  // and falls through to `MathTrunc(start) || 0`, which yields 0.
  if (start <= 0)
    start = 0;
  else if (start >= len)
    return '';
  else
    start = MathTrunc(start) || 0;

  if (end === undefined || end > len)
    end = len;
  else
    end = MathTrunc(end) || 0;

  if (end <= start)
    return '';

  if (encoding === undefined)
    return this.utf8Slice(start, end);

  const ops = getEncodingOps(encoding);
  if (ops === undefined)
    throw new ERR_UNKNOWN_ENCODING(encoding);

  return ops.slice(this, start, end);
};

/**
 * Byte-wise equality against another Buffer/Uint8Array.
 * @returns {boolean}
 */
Buffer.prototype.equals = function equals(otherBuffer) {
  if (!isUint8Array(otherBuffer)) {
    throw new ERR_INVALID_ARG_TYPE(
      'otherBuffer', ['Buffer', 'Uint8Array'], otherBuffer);
  }

  if (this === otherBuffer)
    return true;
  // Cheap length check before the native byte comparison.
  const len = TypedArrayPrototypeGetByteLength(this);
  if (len !== TypedArrayPrototypeGetByteLength(otherBuffer))
    return false;

  return len === 0 || _compare(this, otherBuffer) === 0;
};

let INSPECT_MAX_BYTES = 50;
// Override how buffers are presented by util.inspect().
// util.inspect() representation: up to INSPECT_MAX_BYTES as spaced hex
// pairs, a "... N more bytes" suffix, and any own non-index properties.
Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) {
  const max = INSPECT_MAX_BYTES;
  const actualMax = MathMin(max, this.length);
  const remaining = this.length - max;
  // Insert a space after every hex pair ("deadbeef" -> "de ad be ef").
  let str = StringPrototypeTrim(RegExpPrototypeSymbolReplace(
    /(.{2})/g, this.hexSlice(0, actualMax), '$1 '));
  if (remaining > 0)
    str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`;
  // Inspect special properties as well, if possible.
  if (ctx) {
    let extras = false;
    const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE;
    const obj = { __proto__: null };
    ArrayPrototypeForEach(getOwnNonIndexProperties(this, filter),
                          (key) => {
                            extras = true;
                            obj[key] = this[key];
                          });
    if (extras) {
      if (this.length !== 0)
        str += ', ';
      // '[Object: null prototype] {'.length === 26
      // This is guarded with a test.
      str += StringPrototypeSlice(utilInspect(obj, {
        ...ctx,
        breakLength: Infinity,
        compact: true,
      }), 27, -2);
    }
  }
  // Prefer the subclass name, but tolerate exotic receivers whose
  // `constructor` getter throws.
  let constructorName = 'Buffer';
  try {
    const { constructor } = this;
    if (typeof constructor === 'function' && ObjectPrototypeHasOwnProperty(constructor, 'name')) {
      constructorName = constructor.name;
    }
  } catch { /* Ignore error and use default name */ }
  return `<${constructorName} ${str}>`;
};
Buffer.prototype.inspect = Buffer.prototype[customInspectSymbol];

// Lexicographic byte comparison over optional sub-ranges of both buffers.
// Returns -1/0/1. All offsets are validated before the native compare runs.
Buffer.prototype.compare = function compare(target,
                                            targetStart,
                                            targetEnd,
                                            sourceStart,
                                            sourceEnd) {
  if (!isUint8Array(target)) {
    throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target);
  }
  // Single-argument form compares the buffers whole.
  if (arguments.length === 1)
    return _compare(this, target);

  if (targetStart === undefined)
    targetStart = 0;
  else
    validateOffset(targetStart, 'targetStart');

  if (targetEnd === undefined)
    targetEnd = target.length;
  else
    validateOffset(targetEnd, 'targetEnd', 0, target.length);

  if (sourceStart === undefined)
    sourceStart = 0;
  else
    validateOffset(sourceStart, 'sourceStart');

  if (sourceEnd === undefined)
    sourceEnd = this.length;
  else
    validateOffset(sourceEnd, 'sourceEnd', 0, this.length);

  // Empty ranges short-circuit: empty vs. empty is equal, empty source
  // sorts before a non-empty target and vice versa.
  if (sourceStart >= sourceEnd)
    return (targetStart >= targetEnd ? 0 : -1);
  if (targetStart >= targetEnd)
    return 1;

  return compareOffset(this, target, targetStart, sourceStart, targetEnd,
                       sourceEnd);
};

// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
//
// Arguments:
// - buffer - a Buffer to search
// - val - a string, Buffer, or number
// - byteOffset - an index into `buffer`; will be clamped to an int32
// - encoding - an optional encoding, relevant if val is a string
// - dir - true for indexOf, false for lastIndexOf
function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
  validateBuffer(buffer);

  // Allow (val, encoding) with the offset omitted.
  if (typeof byteOffset === 'string') {
    encoding = byteOffset;
    byteOffset = undefined;
  } else if (byteOffset > 0x7fffffff) {
    byteOffset = 0x7fffffff;
  } else if (byteOffset < -0x80000000) {
    byteOffset = -0x80000000;
  }
  // Coerce to Number. Values like null and [] become 0.
  byteOffset = +byteOffset;
  // If the offset is undefined, "foo", {}, coerces to NaN, search whole buffer.
  if (NumberIsNaN(byteOffset)) {
    byteOffset = dir ? 0 : (buffer.length || buffer.byteLength);
  }
  dir = !!dir; // Cast to bool.

  // Numeric needles search for a single byte (coerced via >>> 0).
  if (typeof val === 'number')
    return indexOfNumber(buffer, val >>> 0, byteOffset, dir);

  let ops;
  if (encoding === undefined)
    ops = encodingOps.utf8;
  else
    ops = getEncodingOps(encoding);

  if (typeof val === 'string') {
    if (ops === undefined)
      throw new ERR_UNKNOWN_ENCODING(encoding);
    return ops.indexOf(buffer, val, byteOffset, dir);
  }

  if (isUint8Array(val)) {
    // Unknown encodings silently fall back to UTF-8 for buffer needles;
    // only string needles throw above.
    const encodingVal =
      (ops === undefined ? encodingsMap.utf8 : ops.encodingVal);
    return indexOfBuffer(buffer, val, byteOffset, encodingVal, dir);
  }

  throw new ERR_INVALID_ARG_TYPE(
    'value', ['number', 'string', 'Buffer', 'Uint8Array'], val,
  );
}

Buffer.prototype.indexOf = function indexOf(val, byteOffset, encoding) {
  return bidirectionalIndexOf(this, val, byteOffset, encoding, true);
};

Buffer.prototype.lastIndexOf = function lastIndexOf(val, byteOffset, encoding) {
  return bidirectionalIndexOf(this, val, byteOffset, encoding, false);
};

Buffer.prototype.includes = function includes(val, byteOffset, encoding) {
  return this.indexOf(val, byteOffset, encoding) !== -1;
};

// Usage:
//    buffer.fill(number[, offset[, end]])
//    buffer.fill(buffer[, offset[, end]])
//    buffer.fill(string[, offset[, end]][, encoding])
Buffer.prototype.fill = function fill(value, offset, end, encoding) {
  return _fill(this, value, offset, end, encoding);
};
// Shared implementation behind Buffer.prototype.fill(). Handles the three
// call shapes (number / buffer / string [+ encoding]), normalizes the
// offset arguments, then fills either via the typed-array fast path (for
// numeric values) or the native binding (for everything else).
function _fill(buf, value, offset, end, encoding) {
  if (typeof value === 'string') {
    // Re-shuffle optional arguments: fill(str, encoding) and
    // fill(str, offset, encoding) are both allowed.
    if (offset === undefined || typeof offset === 'string') {
      encoding = offset;
      offset = 0;
      end = buf.length;
    } else if (typeof end === 'string') {
      encoding = end;
      end = buf.length;
    }

    const resolvedEncoding = normalizeEncoding(encoding);
    if (resolvedEncoding === undefined) {
      // Distinguish "not a string at all" from "unknown encoding name" so
      // callers get the more specific error.
      validateString(encoding, 'encoding');
      throw new ERR_UNKNOWN_ENCODING(encoding);
    }

    if (value.length === 0) {
      // If value === '' default to zero.
      value = 0;
    } else if (value.length === 1) {
      // Fast path: a single character that fits in one byte can be filled
      // as a plain numeric value.
      if (resolvedEncoding === 'utf8') {
        const code = StringPrototypeCharCodeAt(value, 0);
        if (code < 128)
          value = code;
      } else if (resolvedEncoding === 'latin1') {
        value = StringPrototypeCharCodeAt(value, 0);
      }
    }
  } else {
    encoding = undefined;
  }

  if (offset === undefined) {
    offset = 0;
    end = buf.length;
  } else {
    validateOffset(offset, 'offset');
    // Invalid ranges are not set to a default, so can range check early.
    if (end === undefined)
      end = buf.length;
    else
      validateOffset(end, 'end', 0, buf.length);
    // An empty range is a no-op.
    if (offset >= end)
      return buf;
  }

  if (typeof value !== 'number') {
    // String/buffer fills go through the native binding, which reports
    // failures via negative return codes.
    const rc = bindingFill(buf, value, offset, end, encoding);
    if (rc < 0) {
      if (rc === -1)
        throw new ERR_INVALID_ARG_VALUE('value', value);
      throw new ERR_BUFFER_OUT_OF_BOUNDS();
    }
    return buf;
  }

  // Numeric fill: range-check against the underlying byte length, then let
  // the typed-array fast path do the work.
  const byteLen = TypedArrayPrototypeGetByteLength(buf);
  const fillLength = end - offset;
  if (offset > end || fillLength + offset > byteLen)
    throw new ERR_BUFFER_OUT_OF_BOUNDS();
  TypedArrayPrototypeFill(buf, value, offset, end);
  return buf;
}
// JSON serialization hook: produces the canonical { type: 'Buffer', data }
// shape that Buffer.from()/JSON.parse revivers understand.
Buffer.prototype.toJSON = function toJSON() {
  const length = this.length;
  if (length === 0)
    return { type: 'Buffer', data: [] };
  const data = new Array(length);
  for (let index = 0; index < length; ++index)
    data[index] = this[index];
  return { type: 'Buffer', data };
};

// Clamp a user-supplied offset into [0, length], mirroring the TypedArray
// subarray() coercion rules (negatives count from the end, NaN becomes 0).
function adjustOffset(offset, length) {
  // Use Math.trunc() to convert offset to an integer value that can be larger
  // than an Int32. Hence, don't use offset | 0 or similar techniques.
  offset = MathTrunc(offset);
  if (offset === 0)
    return 0;
  if (offset < 0) {
    const fromEnd = offset + length;
    return fromEnd > 0 ? fromEnd : 0;
  }
  if (offset < length)
    return offset;
  // Covers both NaN (treated as 0) and offsets past the end (clamped).
  return NumberIsNaN(offset) ? 0 : length;
}

// Zero-copy view over [start, end); shares memory with this buffer.
Buffer.prototype.subarray = function subarray(start, end) {
  const srcLength = this.length;
  const begin = adjustOffset(start, srcLength);
  const finish = end === undefined ? srcLength : adjustOffset(end, srcLength);
  const viewLength = finish > begin ? finish - begin : 0;
  return new FastBuffer(this.buffer, this.byteOffset + begin, viewLength);
};

// Buffer#slice() is an alias of subarray(): a shared-memory view, unlike
// Uint8Array#slice() which copies.
Buffer.prototype.slice = function slice(start, end) {
  return this.subarray(start, end);
};

// Exchange the bytes at positions n and m of b, in place.
function swap(b, n, m) {
  const tmp = b[n];
  b[n] = b[m];
  b[m] = tmp;
}

// In-place 16-bit byte-order swap. Below 128 bytes a plain JS loop beats
// the cost of dropping into native code.
Buffer.prototype.swap16 = function swap16() {
  const len = this.length;
  if (len % 2 !== 0)
    throw new ERR_INVALID_BUFFER_SIZE('16-bits');
  if (len >= 128)
    return _swap16(this);
  for (let pos = 0; pos < len; pos += 2)
    swap(this, pos, pos + 1);
  return this;
};

// In-place 32-bit byte-order swap; native path above 192 bytes.
Buffer.prototype.swap32 = function swap32() {
  const len = this.length;
  if (len % 4 !== 0)
    throw new ERR_INVALID_BUFFER_SIZE('32-bits');
  if (len >= 192)
    return _swap32(this);
  for (let pos = 0; pos < len; pos += 4) {
    swap(this, pos, pos + 3);
    swap(this, pos + 1, pos + 2);
  }
  return this;
};

// In-place 64-bit byte-order swap; native path above 192 bytes.
Buffer.prototype.swap64 = function swap64() {
  const len = this.length;
  if (len % 8 !== 0)
    throw new ERR_INVALID_BUFFER_SIZE('64-bits');
  if (len >= 192)
    return _swap64(this);
  for (let pos = 0; pos < len; pos += 8) {
    swap(this, pos, pos + 7);
    swap(this, pos + 1, pos + 6);
    swap(this, pos + 2, pos + 5);
    swap(this, pos + 3, pos + 4);
  }
  return this;
};

Buffer.prototype.toLocaleString = Buffer.prototype.toString;

// transcode() is only available when Node was built with ICU support.
let transcode;
if (internalBinding('config').hasIntl) {
  const {
    icuErrName,
    transcode: _transcode,
  } = internalBinding('icu');

  // Transcodes the Buffer from one encoding to another, returning a new
  // Buffer instance.
  transcode = function transcode(source, fromEncoding, toEncoding) {
    if (!isUint8Array(source)) {
      throw new ERR_INVALID_ARG_TYPE('source',
                                     ['Buffer', 'Uint8Array'], source);
    }
    if (source.length === 0)
      return Buffer.alloc(0);

    // Unrecognized names are passed through so ICU reports the error.
    fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding;
    toEncoding = normalizeEncoding(toEncoding) || toEncoding;
    const result = _transcode(source, fromEncoding, toEncoding);
    // A numeric result is an ICU error code; anything else is the Buffer.
    if (typeof result !== 'number')
      return result;

    const code = icuErrName(result);
    throw genericNodeError(
      `Unable to transcode Buffer [${code}]`,
      { code: code, errno: result },
    );
  };
}
// WHATWG atob(): base64-decode a (stringified) input. The native binding
// signals failures through negative return codes, mapped to DOMExceptions
// here.
function atob(input) {
  if (arguments.length === 0) {
    throw new ERR_MISSING_ARGS('input');
  }

  const decoded = _atob(`${input}`);

  if (decoded === -2) {
    // Invalid character in the input.
    throw lazyDOMException('Invalid character', 'InvalidCharacterError');
  }
  if (decoded === -1) {
    // A single character remained after grouping.
    throw lazyDOMException(
      'The string to be decoded is not correctly encoded.',
      'InvalidCharacterError');
  }
  if (decoded === -3) {
    // Possible overflow.
    // TODO(@anonrig): Throw correct error in here.
    throw lazyDOMException('The input causes overflow.', 'InvalidCharacterError');
  }
  return decoded;
}

// True when `input` contains valid UTF-8. Accepts any TypedArray or
// ArrayBuffer/SharedArrayBuffer; everything else is rejected.
function isUtf8(input) {
  if (!isTypedArray(input) && !isAnyArrayBuffer(input)) {
    throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input);
  }
  return bindingIsUtf8(input);
}

// True when every byte of `input` is 7-bit ASCII. Same accepted types as
// isUtf8().
function isAscii(input) {
  if (!isTypedArray(input) && !isAnyArrayBuffer(input)) {
    throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input);
  }
  return bindingIsAscii(input);
}
// `constants` is frozen in place (non-configurable); INSPECT_MAX_BYTES is an
// accessor so assignments from user code are validated and applied to the
// module-local binding used by the inspect hook above.
ObjectDefineProperties(module.exports, {
  constants: {
    __proto__: null,
    configurable: false,
    enumerable: true,
    value: constants,
  },
  INSPECT_MAX_BYTES: {
    __proto__: null,
    configurable: true,
    enumerable: true,
    get() { return INSPECT_MAX_BYTES; },
    set(val) {
      // Must be a number >= 0.
      validateNumber(val, 'INSPECT_MAX_BYTES', 0);
      INSPECT_MAX_BYTES = val;
    },
  },
});

// Blob/File/resolveObjectURL are loaded lazily so requiring 'buffer' does
// not pull in the blob machinery unless it is actually used.
defineLazyProperties(
  module.exports,
  'internal/blob',
  ['Blob', 'resolveObjectURL'],
);
defineLazyProperties(
  module.exports,
  'internal/file',
  ['File'],
);
/**
 * Spawns a new Node.js process + fork.
 * @param {string|URL} modulePath
 * @param {string[]} [args]
 * @param {{
 *   cwd?: string | URL;
 *   detached?: boolean;
 *   env?: Record<string, string>;
 *   execPath?: string;
 *   execArgv?: string[];
 *   gid?: number;
 *   serialization?: string;
 *   signal?: AbortSignal;
 *   killSignal?: string | number;
 *   silent?: boolean;
 *   stdio?: Array | string;
 *   uid?: number;
 *   windowsVerbatimArguments?: boolean;
 *   timeout?: number;
 * }} [options]
 * @returns {ChildProcess}
 */
function fork(modulePath, args = [], options) {
  modulePath = getValidatedPath(modulePath, "modulePath");

  // Get options and args arguments.
  let execArgv;

  if (args == null) {
    args = [];
  } else if (typeof args === "object" && !ArrayIsArray(args)) {
    // fork(modulePath, options) form: the second argument is options.
    options = args;
    args = [];
  } else {
    validateArray(args, "args");
  }

  if (options != null) {
    validateObject(options, "options");
  }
  // `shell` is force-disabled: fork() always executes the Node binary
  // directly, never through a shell.
  options = { __proto__: null, ...options, shell: false };
  options.execPath ||= process.execPath;
  validateArgumentNullCheck(options.execPath, "options.execPath");

  // Prepare arguments for fork:
  execArgv = options.execArgv || process.execArgv;
  validateArgumentsNullCheck(execArgv, "options.execArgv");

  if (execArgv === process.execArgv && process._eval != null) {
    const index = ArrayPrototypeLastIndexOf(execArgv, process._eval);
    if (index > 0) {
      // Remove the -e switch to avoid fork bombing ourselves.
      execArgv = ArrayPrototypeSlice(execArgv);
      ArrayPrototypeSplice(execArgv, index - 1, 2);
    }
  }

  args = [...execArgv, modulePath, ...args];

  if (typeof options.stdio === "string") {
    options.stdio = stdioStringToArray(options.stdio, "ipc");
  } else if (!ArrayIsArray(options.stdio)) {
    // Use a separate fd=3 for the IPC channel. Inherit stdin, stdout,
    // and stderr from the parent if silent isn't set.
    options.stdio = stdioStringToArray(
      options.silent ? "pipe" : "inherit",
      "ipc"
    );
  } else if (!ArrayPrototypeIncludes(options.stdio, "ipc")) {
    // A user-supplied stdio array must reserve a slot for the IPC channel.
    throw new ERR_CHILD_PROCESS_IPC_REQUIRED("options.stdio");
  }

  return spawn(options.execPath, args, options);
}

// Runs inside a freshly forked child: wires the inherited IPC file
// descriptor into process.send()/process.on('message'), and keeps the
// channel ref'd only while 'message'/'disconnect' listeners exist.
function _forkChild(fd, serializationMode) {
  // set process.send()
  const p = new Pipe(PipeConstants.IPC);
  p.open(fd);
  p.unref();
  const control = setupChannel(process, p, serializationMode);
  process.on("newListener", function onNewListener(name) {
    if (name === "message" || name === "disconnect") control.refCounted();
  });
  process.on("removeListener", function onRemoveListener(name) {
    if (name === "message" || name === "disconnect") control.unrefCounted();
  });
}

// Normalizes the (command[, options][, callback]) signature shared by
// exec()/execSync(); the shell is always enabled (a string selects which).
function normalizeExecArgs(command, options, callback) {
  validateString(command, "command");
  validateArgumentNullCheck(command, "command");

  if (typeof options === "function") {
    callback = options;
    options = undefined;
  }

  // Make a shallow copy so we don't clobber the user's options object.
  options = { __proto__: null, ...options };
  options.shell = typeof options.shell === "string" ? options.shell : true;

  return {
    file: command,
    options: options,
    callback: callback,
  };
}
/**
 * Spawns a shell executing the given command.
 * @param {string} command
 * @param {{
 *   cwd?: string | URL;
 *   env?: Record<string, string>;
 *   encoding?: string;
 *   shell?: string;
 *   signal?: AbortSignal;
 *   timeout?: number;
 *   maxBuffer?: number;
 *   killSignal?: string | number;
 *   uid?: number;
 *   gid?: number;
 *   windowsHide?: boolean;
 * }} [options]
 * @param {(
 *   error?: Error,
 *   stdout?: string | Buffer,
 *   stderr?: string | Buffer
 * ) => any} [callback]
 * @returns {ChildProcess}
 */
function exec(command, options, callback) {
  const opts = normalizeExecArgs(command, options, callback);
  // Routed through module.exports so monkey-patched execFile is honored.
  return module.exports.execFile(opts.file, opts.options, opts.callback);
}

// Wraps a callback-style exec/execFile into a promise-returning function
// for util.promisify(). The underlying ChildProcess is exposed on
// `promise.child`; rejection errors carry `stdout`/`stderr`.
const customPromiseExecFunction = (orig) => {
  return assignFunctionName(orig.name, function (...args) {
    const { promise, resolve, reject } = PromiseWithResolvers();

    promise.child = orig(...args, (err, stdout, stderr) => {
      if (err !== null) {
        err.stdout = stdout;
        err.stderr = stderr;
        reject(err);
      } else {
        resolve({ stdout, stderr });
      }
    });

    return promise;
  });
};

ObjectDefineProperty(exec, promisify.custom, {
  __proto__: null,
  enumerable: false,
  value: customPromiseExecFunction(exec),
});

// Normalizes the heavily-overloaded (file[, args][, options][, callback])
// signature shared by execFile()/execFileSync().
function normalizeExecFileArgs(file, args, options, callback) {
  if (ArrayIsArray(args)) {
    args = ArrayPrototypeSlice(args);
  } else if (args != null && typeof args === "object") {
    // (file, options[, callback]) form.
    callback = options;
    options = args;
    args = null;
  } else if (typeof args === "function") {
    // (file, callback) form.
    callback = args;
    options = null;
    args = null;
  }

  args ??= [];

  if (typeof options === "function") {
    callback = options;
  } else if (options != null) {
    validateObject(options, "options");
  }

  options ??= kEmptyObject;

  if (callback != null) {
    validateFunction(callback, "callback");
  }

  // Validate argv0, if present.
  if (options.argv0 != null) {
    validateString(options.argv0, "options.argv0");
    validateArgumentNullCheck(options.argv0, "options.argv0");
  }

  return { file, args, options, callback };
}
/**
 * Spawns the specified file as a shell.
 * @param {string} file
 * @param {string[]} [args]
 * @param {{
 *   cwd?: string | URL;
 *   env?: Record<string, string>;
 *   encoding?: string;
 *   timeout?: number;
 *   maxBuffer?: number;
 *   killSignal?: string | number;
 *   uid?: number;
 *   gid?: number;
 *   windowsHide?: boolean;
 *   windowsVerbatimArguments?: boolean;
 *   shell?: boolean | string;
 *   signal?: AbortSignal;
 * }} [options]
 * @param {(
 *   error?: Error,
 *   stdout?: string | Buffer,
 *   stderr?: string | Buffer
 * ) => any} [callback]
 * @returns {ChildProcess}
 */
function execFile(file, args, options, callback) {
  ({ file, args, options, callback } = normalizeExecFileArgs(
    file,
    args,
    options,
    callback
  ));

  options = {
    __proto__: null,
    encoding: "utf8",
    timeout: 0,
    maxBuffer: MAX_BUFFER,
    killSignal: "SIGTERM",
    cwd: null,
    env: null,
    shell: false,
    ...options,
  };

  // Validate the timeout, if present.
  validateTimeout(options.timeout);

  // Validate maxBuffer, if present.
  validateMaxBuffer(options.maxBuffer);

  options.killSignal = sanitizeKillSignal(options.killSignal);

  const child = spawn(file, args, {
    cwd: options.cwd,
    env: options.env,
    gid: options.gid,
    shell: options.shell,
    signal: options.signal,
    uid: options.uid,
    windowsHide: !!options.windowsHide,
    windowsVerbatimArguments: !!options.windowsVerbatimArguments,
  });

  // A recognized string encoding means output is collected as strings;
  // otherwise raw Buffer chunks are concatenated at exit.
  let encoding;
  const _stdout = [];
  const _stderr = [];
  if (options.encoding !== "buffer" && Buffer.isEncoding(options.encoding)) {
    encoding = options.encoding;
  } else {
    encoding = null;
  }
  let stdoutLen = 0;
  let stderrLen = 0;
  let killed = false;
  let exited = false;
  let timeoutId;

  // First error observed (spawn error, maxBuffer overflow, kill failure);
  // reported to the callback from exithandler exactly once.
  let ex = null;

  let cmd = file;

  // Invoked on 'close' (or synthetically from errorhandler/kill). Merges
  // the collected chunks and settles the user callback exactly once.
  function exithandler(code, signal) {
    if (exited) return;
    exited = true;

    if (timeoutId) {
      clearTimeout(timeoutId);
      timeoutId = null;
    }

    if (!callback) return;

    // merge chunks
    let stdout;
    let stderr;
    if (encoding || child.stdout?.readableEncoding) {
      stdout = ArrayPrototypeJoin(_stdout, "");
    } else {
      stdout = Buffer.concat(_stdout);
    }
    if (encoding || child.stderr?.readableEncoding) {
      stderr = ArrayPrototypeJoin(_stderr, "");
    } else {
      stderr = Buffer.concat(_stderr);
    }

    if (!ex && code === 0 && signal === null) {
      callback(null, stdout, stderr);
      return;
    }

    if (args?.length) cmd += ` ${ArrayPrototypeJoin(args, " ")}`;

    // Negative exit codes are errno values; translate them to names.
    ex ||= genericNodeError(`Command failed: ${cmd}\n${stderr}`, {
      code: code < 0 ? getSystemErrorName(code) : code,
      killed: child.killed || killed,
      signal: signal,
    });

    ex.cmd = cmd;
    callback(ex, stdout, stderr);
  }

  function errorhandler(e) {
    ex = e;

    if (child.stdout) child.stdout.destroy();

    if (child.stderr) child.stderr.destroy();

    exithandler();
  }

  // Kills the child (used for timeout and maxBuffer overflow); a failed
  // kill() settles the callback immediately with that error.
  function kill() {
    if (child.stdout) child.stdout.destroy();

    if (child.stderr) child.stderr.destroy();

    killed = true;
    try {
      child.kill(options.killSignal);
    } catch (e) {
      ex = e;
      exithandler();
    }
  }

  if (options.timeout > 0) {
    timeoutId = setTimeout(function delayedKill() {
      kill();
      timeoutId = null;
    }, options.timeout);
  }

  if (child.stdout) {
    if (encoding) child.stdout.setEncoding(encoding);

    child.stdout.on("data", function onChildStdout(chunk) {
      // Do not need to count the length
      if (options.maxBuffer === Infinity) {
        ArrayPrototypePush(_stdout, chunk);
        return;
      }
      // String chunks are measured in bytes (not UTF-16 units) so maxBuffer
      // means the same thing regardless of encoding.
      const encoding = child.stdout.readableEncoding;
      const length = encoding
        ? Buffer.byteLength(chunk, encoding)
        : chunk.length;
      const slice = encoding
        ? StringPrototypeSlice
        : (buf, ...args) => buf.slice(...args);
      stdoutLen += length;

      if (stdoutLen > options.maxBuffer) {
        // Keep only the part that fits, then terminate the child.
        const truncatedLen = options.maxBuffer - (stdoutLen - length);
        ArrayPrototypePush(_stdout, slice(chunk, 0, truncatedLen));

        ex = new ERR_CHILD_PROCESS_STDIO_MAXBUFFER("stdout");
        kill();
      } else {
        ArrayPrototypePush(_stdout, chunk);
      }
    });
  }

  if (child.stderr) {
    if (encoding) child.stderr.setEncoding(encoding);

    child.stderr.on("data", function onChildStderr(chunk) {
      // Do not need to count the length
      if (options.maxBuffer === Infinity) {
        ArrayPrototypePush(_stderr, chunk);
        return;
      }
      const encoding = child.stderr.readableEncoding;
      const length = encoding
        ? Buffer.byteLength(chunk, encoding)
        : chunk.length;
      stderrLen += length;

      if (stderrLen > options.maxBuffer) {
        const truncatedLen = options.maxBuffer - (stderrLen - length);
        ArrayPrototypePush(_stderr, chunk.slice(0, truncatedLen));

        ex = new ERR_CHILD_PROCESS_STDIO_MAXBUFFER("stderr");
        kill();
      } else {
        ArrayPrototypePush(_stderr, chunk);
      }
    });
  }

  child.addListener("close", exithandler);
  child.addListener("error", errorhandler);

  return child;
}

ObjectDefineProperty(execFile, promisify.custom, {
  __proto__: null,
  enumerable: false,
  value: customPromiseExecFunction(execFile),
});

// Copies process.env[name] into `env` unless the caller's own env object
// already defines that key (user values win over inherited ones).
function copyProcessEnvToEnv(env, name, optionEnv) {
  if (
    process.env[name] &&
    (!optionEnv || !ObjectPrototypeHasOwnProperty(optionEnv, name))
  ) {
    env[name] = process.env[name];
  }
}

// Lazily computed list of permission-model CLI flags that must propagate
// to children; cached after the first call.
let permissionModelFlagsToCopy;

function getPermissionModelFlagsToCopy() {
  if (permissionModelFlagsToCopy === undefined) {
    permissionModelFlagsToCopy = [
      ...permission.availableFlags(),
      "--permission",
    ];
  }
  return permissionModelFlagsToCopy;
}

// Appends the parent's permission-model execArgv flags to env[key]
// (typically NODE_OPTIONS) so the child runs under the same restrictions.
function copyPermissionModelFlagsToEnv(env, key, args) {
  // Do not override if permission was already passed to file
  if (
    args.includes("--permission") ||
    (env[key] && env[key].indexOf("--permission") !== -1)
  ) {
    return;
  }

  const flagsToCopy = getPermissionModelFlagsToCopy();
  for (const arg of process.execArgv) {
    for (const flag of flagsToCopy) {
      if (arg.startsWith(flag)) {
        env[key] = `${env[key] ? env[key] + " " + arg : arg}`;
      }
    }
  }
}
// Emit the DEP0190 shell/args warning at most once per process.
let emittedDEP0190Already = false;
// Validates every spawn() option, applies shell wrapping and argv0
// handling, and flattens the environment into the `envPairs` array the
// native layer consumes. Returns a normalized, null-prototype options
// object; never mutates the caller's options.
function normalizeSpawnArguments(file, args, options) {
  validateString(file, "file");
  validateArgumentNullCheck(file, "file");

  if (file.length === 0)
    throw new ERR_INVALID_ARG_VALUE("file", file, "cannot be empty");

  if (ArrayIsArray(args)) {
    args = ArrayPrototypeSlice(args);
  } else if (args == null) {
    args = [];
  } else if (typeof args !== "object") {
    throw new ERR_INVALID_ARG_TYPE("args", "object", args);
  } else {
    // spawn(file, options) form.
    options = args;
    args = [];
  }

  validateArgumentsNullCheck(args, "args");

  if (options === undefined) options = kEmptyObject;
  else validateObject(options, "options");

  options = { __proto__: null, ...options };
  let cwd = options.cwd;

  // Validate the cwd, if present.
  if (cwd != null) {
    cwd = getValidatedPath(cwd, "options.cwd");
  }

  // Validate detached, if present.
  if (options.detached != null) {
    validateBoolean(options.detached, "options.detached");
  }

  // Validate the uid, if present.
  if (options.uid != null) {
    validateInt32(options.uid, "options.uid");
  }

  // Validate the gid, if present.
  if (options.gid != null) {
    validateInt32(options.gid, "options.gid");
  }

  // Validate the shell, if present.
  if (
    options.shell != null &&
    typeof options.shell !== "boolean" &&
    typeof options.shell !== "string"
  ) {
    throw new ERR_INVALID_ARG_TYPE(
      "options.shell",
      ["boolean", "string"],
      options.shell
    );
  }

  // Validate argv0, if present.
  if (options.argv0 != null) {
    validateString(options.argv0, "options.argv0");
    validateArgumentNullCheck(options.argv0, "options.argv0");
  }

  // Validate windowsHide, if present.
  if (options.windowsHide != null) {
    validateBoolean(options.windowsHide, "options.windowsHide");
  }

  // Validate windowsVerbatimArguments, if present.
  let { windowsVerbatimArguments } = options;
  if (windowsVerbatimArguments != null) {
    validateBoolean(
      windowsVerbatimArguments,
      "options.windowsVerbatimArguments"
    );
  }

  if (options.shell) {
    validateArgumentNullCheck(options.shell, "options.shell");
    if (args.length > 0 && !emittedDEP0190Already) {
      process.emitWarning(
        "Passing args to a child process with shell option true can lead to security " +
          "vulnerabilities, as the arguments are not escaped, only concatenated.",
        "DeprecationWarning",
        "DEP0190"
      );
      emittedDEP0190Already = true;
    }

    // NOTE: args are concatenated unescaped into one shell command — this
    // is the documented (and DEP0190-warned) shell-mode behavior.
    const command =
      args.length > 0 ? `${file} ${ArrayPrototypeJoin(args, " ")}` : file;
    // Set the shell, switches, and commands.
    if (process.platform === "win32") {
      if (typeof options.shell === "string") file = options.shell;
      else file = process.env.comspec || "cmd.exe";
      // '/d /s /c' is used only for cmd.exe.
      if (RegExpPrototypeExec(/^(?:.*\\)?cmd(?:\.exe)?$/i, file) !== null) {
        args = ["/d", "/s", "/c", `"${command}"`];
        windowsVerbatimArguments = true;
      } else {
        args = ["-c", command];
      }
    } else {
      if (typeof options.shell === "string") file = options.shell;
      else if (process.platform === "android") file = "/system/bin/sh";
      else file = "/bin/sh";
      args = ["-c", command];
    }
  }

  // argv[0] defaults to the executable path unless overridden.
  if (typeof options.argv0 === "string") {
    ArrayPrototypeUnshift(args, options.argv0);
  } else {
    ArrayPrototypeUnshift(args, file);
  }

  // Shallow copy to guarantee changes won't impact process.env
  const env = options.env || { ...process.env };
  const envPairs = [];

  // process.env.NODE_V8_COVERAGE always propagates, making it possible to
  // collect coverage for programs that spawn with white-listed environment.
  copyProcessEnvToEnv(env, "NODE_V8_COVERAGE", options.env);

  if (isZOS) {
    // The following environment variables must always propagate if set.
    copyProcessEnvToEnv(env, "_BPXK_AUTOCVT", options.env);
    copyProcessEnvToEnv(env, "_CEE_RUNOPTS", options.env);
    copyProcessEnvToEnv(env, "_TAG_REDIR_ERR", options.env);
    copyProcessEnvToEnv(env, "_TAG_REDIR_IN", options.env);
    copyProcessEnvToEnv(env, "_TAG_REDIR_OUT", options.env);
    copyProcessEnvToEnv(env, "STEPLIB", options.env);
    copyProcessEnvToEnv(env, "LIBPATH", options.env);
    copyProcessEnvToEnv(env, "_EDC_SIG_DFLT", options.env);
    copyProcessEnvToEnv(env, "_EDC_SUSV3", options.env);
  }

  if (permission.isEnabled()) {
    copyPermissionModelFlagsToEnv(env, "NODE_OPTIONS", args);
  }

  let envKeys = [];
  // Prototype values are intentionally included.
  for (const key in env) {
    ArrayPrototypePush(envKeys, key);
  }

  if (process.platform === "win32") {
    // On Windows env keys are case insensitive. Filter out duplicates,
    // keeping only the first one (in lexicographic order)
    const sawKey = new SafeSet();
    envKeys = ArrayPrototypeFilter(ArrayPrototypeSort(envKeys), (key) => {
      const uppercaseKey = StringPrototypeToUpperCase(key);
      if (sawKey.has(uppercaseKey)) {
        return false;
      }
      sawKey.add(uppercaseKey);
      return true;
    });
  }

  for (const key of envKeys) {
    const value = env[key];
    if (value !== undefined) {
      // NUL bytes in env keys/values would corrupt the native env block.
      validateArgumentNullCheck(key, `options.env['${key}']`);
      validateArgumentNullCheck(value, `options.env['${key}']`);
      ArrayPrototypePush(envPairs, `${key}=${value}`);
    }
  }

  return {
    // Make a shallow copy so we don't clobber the user's options object.
    __proto__: null,
    ...options,
    args,
    cwd,
    detached: !!options.detached,
    envPairs,
    file,
    windowsHide: !!options.windowsHide,
    windowsVerbatimArguments: !!windowsVerbatimArguments,
  };
}

// Kills `child` with `killSignal` and, if the kill was delivered, emits an
// AbortError carrying the abort reason. Kill failures are surfaced as
// 'error' events instead of throwing.
function abortChildProcess(child, killSignal, reason) {
  if (!child) return;
  try {
    if (child.kill(killSignal)) {
      child.emit("error", new AbortError(undefined, { cause: reason }));
    }
  } catch (err) {
    child.emit("error", err);
  }
}

/**
 * Spawns a new process using the given `file`.
 * @param {string} file
 * @param {string[]} [args]
 * @param {{
 *   cwd?: string | URL;
 *   env?: Record<string, string>;
 *   argv0?: string;
 *   stdio?: Array | string;
 *   detached?: boolean;
 *   uid?: number;
 *   gid?: number;
 *   serialization?: string;
 *   shell?: boolean | string;
 *   windowsVerbatimArguments?: boolean;
 *   windowsHide?: boolean;
 *   signal?: AbortSignal;
 *   timeout?: number;
 *   killSignal?: string | number;
 * }} [options]
 * @returns {ChildProcess}
 */
function spawn(file, args, options) {
  options = normalizeSpawnArguments(file, args, options);
  validateTimeout(options.timeout);
  validateAbortSignal(options.signal, "options.signal");
  const killSignal = sanitizeKillSignal(options.killSignal);
  const child = new ChildProcess();

  debug("spawn", options);
  child.spawn(options);

  if (options.timeout > 0) {
    // `timeoutId` doubles as the "still pending" flag: cleared on exit so a
    // late timer never kills an already-finished child.
    let timeoutId = setTimeout(() => {
      if (timeoutId) {
        try {
          child.kill(killSignal);
        } catch (err) {
          child.emit("error", err);
        }
        timeoutId = null;
      }
    }, options.timeout);

    child.once("exit", () => {
      if (timeoutId) {
        clearTimeout(timeoutId);
        timeoutId = null;
      }
    });
  }

  if (options.signal) {
    const signal = options.signal;
    if (signal.aborted) {
      // Already aborted: kill on the next tick, after the caller has had a
      // chance to attach 'error' listeners.
      process.nextTick(onAbortListener);
    } else {
      addAbortListener ??=
        require("internal/events/abort_listener").addAbortListener;
      const disposable = addAbortListener(signal, onAbortListener);
      // Detach the abort listener once the child exits on its own.
      child.once("exit", disposable[SymbolDispose]);
    }

    function onAbortListener() {
      abortChildProcess(child, killSignal, options.signal.reason);
    }
  }

  return child;
}
+ options.killSignal = sanitizeKillSignal(options.killSignal); + + options.stdio = getValidStdio(options.stdio || "pipe", true).stdio; + + if (options.input) { + const stdin = (options.stdio[0] = { ...options.stdio[0] }); + stdin.input = options.input; + } + + // We may want to pass data in on any given fd, ensure it is a valid buffer + for (let i = 0; i < options.stdio.length; i++) { + const input = options.stdio[i]?.input; + if (input != null) { + const pipe = (options.stdio[i] = { ...options.stdio[i] }); + if (isArrayBufferView(input)) { + pipe.input = input; + } else if (typeof input === "string") { + pipe.input = Buffer.from(input, options.encoding); + } else { + throw new ERR_INVALID_ARG_TYPE( + `options.stdio[${i}]`, + ["Buffer", "TypedArray", "DataView", "string"], + input + ); + } + } + } + + return child_process.spawnSync(options); +} + +function checkExecSyncError(ret, args, cmd) { + let err; + if (ret.error) { + err = ret.error; + ObjectAssign(err, ret); + } else if (ret.status !== 0) { + let msg = "Command failed: "; + msg += cmd || ArrayPrototypeJoin(args, " "); + if (ret.stderr && ret.stderr.length > 0) + msg += `\n${ret.stderr.toString()}`; + err = genericNodeError(msg, ret); + } + return err; +} + +/** + * Spawns a file as a shell synchronously. 
+ * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * stdio?: string | Array; + * env?: Record; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * windowsHide?: boolean; + * shell?: boolean | string; + * }} [options] + * @returns {Buffer | string} + */ +function execFileSync(file, args, options) { + ({ file, args, options } = normalizeExecFileArgs(file, args, options)); + + const inheritStderr = !options.stdio; + const ret = spawnSync(file, args, options); + + if (inheritStderr && ret.stderr) process.stderr.write(ret.stderr); + + const errArgs = [options.argv0 || file]; + ArrayPrototypePushApply(errArgs, args); + const err = checkExecSyncError(ret, errArgs); + + if (err) throw err; + + return ret.stdout; +} + +/** + * Spawns a shell executing the given `command` synchronously. + * @param {string} command + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * stdio?: string | Array; + * env?: Record; + * shell?: string; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * windowsHide?: boolean; + * }} [options] + * @returns {Buffer | string} + */ +function execSync(command, options) { + const opts = normalizeExecArgs(command, options, null); + const inheritStderr = !opts.options.stdio; + + const ret = spawnSync(opts.file, opts.options); + + if (inheritStderr && ret.stderr) process.stderr.write(ret.stderr); + + const err = checkExecSyncError(ret, undefined, command); + + if (err) throw err; + + return ret.stdout; +} + +function validateArgumentNullCheck(arg, propName) { + if (typeof arg === "string" && StringPrototypeIncludes(arg, "\u0000")) { + throw new ERR_INVALID_ARG_VALUE( + propName, + arg, + "must be a string without null bytes" + ); + } +} + 
+// Applies the NUL-byte check to every element of an argument array.
+function validateArgumentsNullCheck(args, propName) {
+  for (let i = 0; i < args.length; ++i) {
+    validateArgumentNullCheck(args[i], `${propName}[${i}]`);
+  }
+}
+
+// `timeout` is optional; when given it must be a non-negative integer.
+function validateTimeout(timeout) {
+  if (timeout != null) {
+    validateInteger(timeout, "timeout", 0);
+  }
+}
+
+// `maxBuffer` is optional; when given it must be a non-negative number.
+function validateMaxBuffer(maxBuffer) {
+  if (maxBuffer != null) {
+    validateNumber(maxBuffer, "options.maxBuffer", 0);
+  }
+}
+
+// Normalizes a kill signal (name or number) to its numeric value; returns
+// undefined when no signal was supplied, throws on any other type.
+function sanitizeKillSignal(killSignal) {
+  if (typeof killSignal === "string" || typeof killSignal === "number") {
+    return convertToValidSignal(killSignal);
+  } else if (killSignal != null) {
+    throw new ERR_INVALID_ARG_TYPE(
+      "options.killSignal",
+      ["string", "number"],
+      killSignal
+    );
+  }
+}
+
+module.exports = {
+  _forkChild,
+  ChildProcess,
+  exec,
+  execFile,
+  execFileSync,
+  execSync,
+  fork,
+  spawn,
+  spawnSync,
+};
diff --git a/node/cluster.js b/node/cluster.js
new file mode 100644
index 00000000..6f3dc168
--- /dev/null
+++ b/node/cluster.js
@@ -0,0 +1,8 @@
+'use strict';
+
+const {
+  ObjectPrototypeHasOwnProperty: ObjectHasOwn,
+} = primordials;
+
+// Workers are forked with NODE_UNIQUE_ID in their environment; its presence
+// selects the child implementation, its absence the primary.
+const childOrPrimary = ObjectHasOwn(process.env, 'NODE_UNIQUE_ID') ? 'child' : 'primary';
+module.exports = require(`internal/cluster/${childOrPrimary}`);
\ No newline at end of file
diff --git a/node/console.js b/node/console.js
new file mode 100644
index 00000000..d896d2e3
--- /dev/null
+++ b/node/console.js
@@ -0,0 +1,3 @@
+"use strict";
+
+module.exports = require("internal/console/global");
diff --git a/node/crypto.js b/node/crypto.js
new file mode 100644
index 00000000..06bfebc7
--- /dev/null
+++ b/node/crypto.js
@@ -0,0 +1,1365 @@
+'use strict';
+
+// NOTE(review): this file is added as node/crypto.js but its contents are
+// Node core's lib/buffer.js (the Buffer implementation) — confirm the
+// intended filename before merging.
+const {
+  Array,
+  ArrayBufferIsView,
+  ArrayIsArray,
+  ArrayPrototypeForEach,
+  MathFloor,
+  MathMin,
+  MathTrunc,
+  NumberIsInteger,
+  NumberIsNaN,
+  NumberMAX_SAFE_INTEGER,
+  NumberMIN_SAFE_INTEGER,
+  ObjectDefineProperties,
+  ObjectDefineProperty,
+  ObjectPrototypeHasOwnProperty,
+  ObjectSetPrototypeOf,
+  RegExpPrototypeSymbolReplace,
+  StringPrototypeCharCodeAt,
+  StringPrototypeSlice,
+  StringPrototypeToLowerCase,
+  StringPrototypeTrim,
+  SymbolSpecies,
+  SymbolToPrimitive,
+  TypedArrayPrototypeFill,
+  TypedArrayPrototypeGetBuffer,
+  TypedArrayPrototypeGetByteLength,
+  TypedArrayPrototypeGetByteOffset,
+  TypedArrayPrototypeGetLength,
+  TypedArrayPrototypeSet,
+  TypedArrayPrototypeSlice,
+  Uint8Array,
+  Uint8ArrayPrototype,
+} = primordials;
+
+const {
+  byteLengthUtf8,
+  compare: _compare,
+  compareOffset,
+  copy: _copy,
+  fill: bindingFill,
+  isAscii: bindingIsAscii,
+  isUtf8: bindingIsUtf8,
+  indexOfBuffer,
+  indexOfNumber,
+  indexOfString,
+  swap16: _swap16,
+  swap32: _swap32,
+  swap64: _swap64,
+  kMaxLength,
+  kStringMaxLength,
+  atob: _atob,
+  btoa: _btoa,
+} = internalBinding('buffer');
+const {
+  constants: {
+    ALL_PROPERTIES,
+    ONLY_ENUMERABLE,
+  },
+  getOwnNonIndexProperties,
+  isInsideNodeModules,
+} = internalBinding('util');
+const {
+  customInspectSymbol,
+  lazyDOMException,
+  normalizeEncoding,
+  kIsEncodingSymbol,
+  defineLazyProperties,
+  encodingsMap,
+  deprecate,
+} = require('internal/util');
+const {
+  isAnyArrayBuffer,
+  isArrayBufferView,
+  isUint8Array,
+  isTypedArray,
+} = require('internal/util/types');
+const {
+  inspect: utilInspect,
+} = require('internal/util/inspect');
+
+const {
+  codes: {
+    ERR_BUFFER_OUT_OF_BOUNDS,
+    ERR_INVALID_ARG_TYPE,
+    ERR_INVALID_ARG_VALUE,
+    ERR_INVALID_BUFFER_SIZE,
+    ERR_MISSING_ARGS,
+    ERR_OUT_OF_RANGE,
+    ERR_UNKNOWN_ENCODING,
+  },
+  genericNodeError,
+} = require('internal/errors');
+const {
+  validateArray,
+  validateBuffer,
+  validateInteger,
+  validateNumber,
+  validateString,
+} = require('internal/validators');
+// Provide validateInteger() but with kMaxLength as the default maximum value.
+const validateOffset = (value, name, min = 0, max = kMaxLength) =>
+  validateInteger(value, name, min, max);
+
+const {
+  FastBuffer,
+  markAsUntransferable,
+  addBufferPrototypeMethods,
+  createUnsafeBuffer,
+} = require('internal/buffer');
+
+// Wire the public Buffer and the internal FastBuffer to share one prototype.
+FastBuffer.prototype.constructor = Buffer;
+Buffer.prototype = FastBuffer.prototype;
+addBufferPrototypeMethods(Buffer.prototype);
+
+const constants = ObjectDefineProperties({}, {
+  MAX_LENGTH: {
+    __proto__: null,
+    value: kMaxLength,
+    writable: false,
+    enumerable: true,
+  },
+  MAX_STRING_LENGTH: {
+    __proto__: null,
+    value: kStringMaxLength,
+    writable: false,
+    enumerable: true,
+  },
+});
+
+// Small allocations are carved out of a shared 8 KiB pool instead of getting
+// their own ArrayBuffer; see allocate()/fromStringFast() below.
+Buffer.poolSize = 8 * 1024;
+let poolSize, poolOffset, allocPool, allocBuffer;
+
+// (Re)creates the shared pool. The backing ArrayBuffer is marked
+// untransferable so postMessage() cannot detach memory shared by many
+// pooled buffers.
+function createPool() {
+  poolSize = Buffer.poolSize;
+  allocBuffer = createUnsafeBuffer(poolSize);
+  allocPool = allocBuffer.buffer;
+  markAsUntransferable(allocPool);
+  poolOffset = 0;
+}
+createPool();
+
+// Rounds poolOffset up to the next multiple of 8.
+function alignPool() {
+  // Ensure aligned slices
+  if (poolOffset & 0x7) {
+    poolOffset |= 0x7;
+    poolOffset++;
+  }
+}
+
+let bufferWarningAlreadyEmitted = false;
+let nodeModulesCheckCounter = 0;
+const bufferWarning = 'Buffer() is deprecated due to security and usability ' +
+                      'issues. Please use the Buffer.alloc(), ' +
+                      'Buffer.allocUnsafe(), or Buffer.from() methods instead.';
+
+// Emits the DEP0005 deprecation warning for the Buffer() constructor, at
+// most once per process and subject to the conditions documented inline.
+function showFlaggedDeprecation() {
+  if (bufferWarningAlreadyEmitted ||
+      ++nodeModulesCheckCounter > 10000 ||
+      (!require('internal/options').getOptionValue('--pending-deprecation') &&
+       isInsideNodeModules(100, true))) {
+    // We don't emit a warning, because we either:
+    // - Already did so, or
+    // - Already checked too many times whether a call is coming
+    //   from node_modules and want to stop slowing down things, or
+    // - We aren't running with `--pending-deprecation` enabled,
+    //   and the code is inside `node_modules`.
+    // - We found node_modules in up to the topmost 100 frames, or
+    //   there are more than 100 frames and we don't want to search anymore.
+    return;
+  }
+
+  process.emitWarning(bufferWarning, 'DeprecationWarning', 'DEP0005');
+  bufferWarningAlreadyEmitted = true;
+}
+
+// Coerces `n` to an integer (truncating toward -Infinity via MathFloor),
+// or returns `defaultVal` when `n` is NaN or outside the safe-integer range.
+function toInteger(n, defaultVal) {
+  n = +n;
+  if (!NumberIsNaN(n) &&
+      n >= NumberMIN_SAFE_INTEGER &&
+      n <= NumberMAX_SAFE_INTEGER) {
+    return ((n % 1) === 0 ? n : MathFloor(n));
+  }
+  return defaultVal;
+}
+
+// Argument validation and range normalization for Buffer#copy(); the actual
+// byte movement is delegated to _copyActual().
+function copyImpl(source, target, targetStart, sourceStart, sourceEnd) {
+  if (!ArrayBufferIsView(source))
+    throw new ERR_INVALID_ARG_TYPE('source', ['Buffer', 'Uint8Array'], source);
+  if (!ArrayBufferIsView(target))
+    throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target);
+
+  if (targetStart === undefined) {
+    targetStart = 0;
+  } else {
+    targetStart = NumberIsInteger(targetStart) ? targetStart : toInteger(targetStart, 0);
+    if (targetStart < 0)
+      throw new ERR_OUT_OF_RANGE('targetStart', '>= 0', targetStart);
+  }
+
+  if (sourceStart === undefined) {
+    sourceStart = 0;
+  } else {
+    sourceStart = NumberIsInteger(sourceStart) ? sourceStart : toInteger(sourceStart, 0);
+    if (sourceStart < 0 || sourceStart > source.byteLength)
+      throw new ERR_OUT_OF_RANGE('sourceStart', `>= 0 && <= ${source.byteLength}`, sourceStart);
+  }
+
+  if (sourceEnd === undefined) {
+    sourceEnd = source.byteLength;
+  } else {
+    sourceEnd = NumberIsInteger(sourceEnd) ? sourceEnd : toInteger(sourceEnd, 0);
+    if (sourceEnd < 0)
+      throw new ERR_OUT_OF_RANGE('sourceEnd', '>= 0', sourceEnd);
+  }
+
+  if (targetStart >= target.byteLength || sourceStart >= sourceEnd)
+    return 0;
+
+  return _copyActual(source, target, targetStart, sourceStart, sourceEnd);
+}
+
+// Clamps the source range to what fits in `target` and copies; returns the
+// number of bytes actually copied (0 when the clamped range is empty).
+function _copyActual(source, target, targetStart, sourceStart, sourceEnd) {
+  if (sourceEnd - sourceStart > target.byteLength - targetStart)
+    sourceEnd = sourceStart + target.byteLength - targetStart;
+
+  let nb = sourceEnd - sourceStart;
+  const sourceLen = source.byteLength - sourceStart;
+  if (nb > sourceLen)
+    nb = sourceLen;
+
+  if (nb <= 0)
+    return 0;
+
+  _copy(source, target, targetStart, sourceStart, nb);
+
+  return nb;
+}
+
+/**
+ * The Buffer() constructor is deprecated in documentation and should not be
+ * used moving forward. Rather, developers should use one of the three new
+ * factory APIs: Buffer.from(), Buffer.allocUnsafe() or Buffer.alloc() based on
+ * their specific needs. There is no runtime deprecation because of the extent
+ * to which the Buffer constructor is used in the ecosystem currently -- a
+ * runtime deprecation would introduce too much breakage at this time. It's not
+ * likely that the Buffer constructors would ever actually be removed.
+ * Deprecation Code: DEP0005
+ * @returns {Buffer}
+ */
+function Buffer(arg, encodingOrOffset, length) {
+  showFlaggedDeprecation();
+  // Common case.
+  if (typeof arg === 'number') {
+    if (typeof encodingOrOffset === 'string') {
+      throw new ERR_INVALID_ARG_TYPE('string', 'string', arg);
+    }
+    return Buffer.alloc(arg);
+  }
+  return Buffer.from(arg, encodingOrOffset, length);
+}
+
+// Derived views (e.g. subarray) are constructed as FastBuffer, skipping the
+// deprecated Buffer() constructor above.
+ObjectDefineProperty(Buffer, SymbolSpecies, {
+  __proto__: null,
+  enumerable: false,
+  configurable: true,
+  get() { return FastBuffer; },
+});
+
+/**
+ * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
+ * if value is a number.
+ * Buffer.from(str[, encoding])
+ * Buffer.from(array)
+ * Buffer.from(buffer)
+ * Buffer.from(arrayBuffer[, byteOffset[, length]])
+ * @param {any} value
+ * @param {BufferEncoding|number} encodingOrOffset
+ * @param {number} [length]
+ * @returns {Buffer}
+ */
+Buffer.from = function from(value, encodingOrOffset, length) {
+  if (typeof value === 'string')
+    return fromString(value, encodingOrOffset);
+
+  if (typeof value === 'object' && value !== null) {
+    if (isAnyArrayBuffer(value))
+      return fromArrayBuffer(value, encodingOrOffset, length);
+
+    // Objects with a usable valueOf() (e.g. boxed strings) are unwrapped
+    // and retried, then array-likes, then Symbol.toPrimitive.
+    const valueOf = value.valueOf && value.valueOf();
+    if (valueOf != null &&
+        valueOf !== value &&
+        (typeof valueOf === 'string' || typeof valueOf === 'object')) {
+      return from(valueOf, encodingOrOffset, length);
+    }
+
+    const b = fromObject(value);
+    if (b)
+      return b;
+
+    if (typeof value[SymbolToPrimitive] === 'function') {
+      const primitive = value[SymbolToPrimitive]('string');
+      if (typeof primitive === 'string') {
+        return fromString(primitive, encodingOrOffset);
+      }
+    }
+  }
+
+  throw new ERR_INVALID_ARG_TYPE(
+    'first argument',
+    ['string', 'Buffer', 'ArrayBuffer', 'Array', 'Array-like Object'],
+    value,
+  );
+};
+
+/**
+ * Creates the Buffer as a copy of the underlying ArrayBuffer of the view
+ * rather than the contents of the view.
+ * @param {TypedArray} view
+ * @param {number} [offset]
+ * @param {number} [length]
+ * @returns {Buffer}
+ */
+Buffer.copyBytesFrom = function copyBytesFrom(view, offset, length) {
+  if (!isTypedArray(view)) {
+    throw new ERR_INVALID_ARG_TYPE('view', [ 'TypedArray' ], view);
+  }
+
+  const viewLength = TypedArrayPrototypeGetLength(view);
+  if (viewLength === 0) {
+    return Buffer.alloc(0);
+  }
+
+  if (offset !== undefined || length !== undefined) {
+    if (offset !== undefined) {
+      validateInteger(offset, 'offset', 0);
+      if (offset >= viewLength) return Buffer.alloc(0);
+    } else {
+      offset = 0;
+    }
+    let end;
+    if (length !== undefined) {
+      validateInteger(length, 'length', 0);
+      end = offset + length;
+    } else {
+      end = viewLength;
+    }
+
+    view = TypedArrayPrototypeSlice(view, offset, end);
+  }
+
+  return fromArrayLike(new Uint8Array(
+    TypedArrayPrototypeGetBuffer(view),
+    TypedArrayPrototypeGetByteOffset(view),
+    TypedArrayPrototypeGetByteLength(view)));
+};
+
+// Identical to the built-in %TypedArray%.of(), but avoids using the deprecated
+// Buffer() constructor. Must use arrow function syntax to avoid automatically
+// adding a `prototype` property and making the function a constructor.
+//
+// Refs: https://tc39.github.io/ecma262/#sec-%typedarray%.of
+// Refs: https://esdiscuss.org/topic/isconstructor#content-11
+const of = (...items) => {
+  const newObj = createUnsafeBuffer(items.length);
+  for (let k = 0; k < items.length; k++)
+    newObj[k] = items[k];
+  return newObj;
+};
+Buffer.of = of;
+
+ObjectSetPrototypeOf(Buffer, Uint8Array);
+
+/**
+ * Creates a new filled Buffer instance.
+ * alloc(size[, fill[, encoding]])
+ * @returns {FastBuffer}
+ */
+Buffer.alloc = function alloc(size, fill, encoding) {
+  validateNumber(size, 'size', 0, kMaxLength);
+  // A non-zero fill requires an explicit pass; otherwise FastBuffer(size)
+  // already yields zeroed memory.
+  if (fill !== undefined && fill !== 0 && size > 0) {
+    const buf = createUnsafeBuffer(size);
+    return _fill(buf, fill, 0, buf.length, encoding);
+  }
+  return new FastBuffer(size);
+};
+
+/**
+ * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer
+ * instance. If `--zero-fill-buffers` is set, will zero-fill the buffer.
+ * @returns {FastBuffer}
+ */
+Buffer.allocUnsafe = function allocUnsafe(size) {
+  validateNumber(size, 'size', 0, kMaxLength);
+  return allocate(size);
+};
+
+/**
+ * Equivalent to SlowBuffer(num), by default creates a non-zero-filled
+ * Buffer instance that is not allocated off the pre-initialized pool.
+ * If `--zero-fill-buffers` is set, will zero-fill the buffer.
+ * @param {number} size
+ * @returns {FastBuffer|undefined}
+ */
+Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) {
+  validateNumber(size, 'size', 0, kMaxLength);
+  return createUnsafeBuffer(size);
+};
+
+// If --zero-fill-buffers command line argument is set, a zero-filled
+// buffer is returned.
+function SlowBuffer(size) {
+  validateNumber(size, 'size', 0, kMaxLength);
+  return createUnsafeBuffer(size);
+}
+
+ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype);
+ObjectSetPrototypeOf(SlowBuffer, Uint8Array);
+
+// Allocates `size` bytes: small requests (< half the pool size) are carved
+// from the shared pool, larger ones get their own backing store.
+function allocate(size) {
+  if (size <= 0) {
+    return new FastBuffer();
+  }
+  if (size < (Buffer.poolSize >>> 1)) {
+    if (size > (poolSize - poolOffset))
+      createPool();
+    const b = new FastBuffer(allocPool, poolOffset, size);
+    poolOffset += size;
+    alignPool();
+    return b;
+  }
+  return createUnsafeBuffer(size);
+}
+
+// Encodes `string` into a pooled buffer when the worst-case encoded size
+// fits; falls back to createFromString() (own allocation) otherwise.
+function fromStringFast(string, ops) {
+  const maxLength = Buffer.poolSize >>> 1;
+
+  let length = string.length; // Min length
+
+  if (length >= maxLength)
+    return createFromString(string, ops);
+
+  length *= 4; // Max length (4 bytes per character)
+
+  if (length >= maxLength)
+    length = ops.byteLength(string); // Actual length
+
+  if (length >= maxLength)
+    return createFromString(string, ops, length);
+
+  if (length > (poolSize - poolOffset))
+    createPool();
+
+  const actual = ops.write(allocBuffer, string, poolOffset, length);
+  const b = new FastBuffer(allocPool, poolOffset, actual);
+
+  poolOffset += actual;
+  alignPool();
+  return b;
+}
+
+// Non-pooled string encoding; trims the buffer when fewer bytes were
+// written than allocated.
+function createFromString(string, ops, length = ops.byteLength(string)) {
+  const buf = Buffer.allocUnsafeSlow(length);
+  const actual = ops.write(buf, string, 0, length);
+  return actual < length ? new FastBuffer(buf.buffer, 0, actual) : buf;
+}
+
+function fromString(string, encoding) {
+  let ops;
+  if (!encoding || encoding === 'utf8' || typeof encoding !== 'string') {
+    ops = encodingOps.utf8;
+  } else {
+    ops = getEncodingOps(encoding);
+    if (ops === undefined)
+      throw new ERR_UNKNOWN_ENCODING(encoding);
+  }
+
+  return string.length === 0 ? new FastBuffer() : fromStringFast(string, ops);
+}
+
+// Creates a view over an existing ArrayBuffer (no copy).
+function fromArrayBuffer(obj, byteOffset, length) {
+  // Convert byteOffset to integer
+  if (byteOffset === undefined) {
+    byteOffset = 0;
+  } else {
+    byteOffset = +byteOffset;
+    if (NumberIsNaN(byteOffset))
+      byteOffset = 0;
+  }
+
+  const maxLength = obj.byteLength - byteOffset;
+
+  if (maxLength < 0)
+    throw new ERR_BUFFER_OUT_OF_BOUNDS('offset');
+
+  if (length !== undefined) {
+    // Convert length to non-negative integer.
+    length = +length;
+    if (length > 0) {
+      if (length > maxLength)
+        throw new ERR_BUFFER_OUT_OF_BOUNDS('length');
+    } else {
+      length = 0;
+    }
+  }
+
+  return new FastBuffer(obj, byteOffset, length);
+}
+
+// Copies an array-like's elements into a (pooled when small) buffer.
+function fromArrayLike(obj) {
+  if (obj.length <= 0)
+    return new FastBuffer();
+  if (obj.length < (Buffer.poolSize >>> 1)) {
+    if (obj.length > (poolSize - poolOffset))
+      createPool();
+    const b = new FastBuffer(allocPool, poolOffset, obj.length);
+    TypedArrayPrototypeSet(b, obj, 0);
+    poolOffset += obj.length;
+    alignPool();
+    return b;
+  }
+  return new FastBuffer(obj);
+}
+
+// Handles array-likes and JSON-serialized buffers ({ type: 'Buffer',
+// data: [...] }); returns undefined when the object is neither.
+function fromObject(obj) {
+  if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) {
+    if (typeof obj.length !== 'number') {
+      return new FastBuffer();
+    }
+    return fromArrayLike(obj);
+  }
+
+  if (obj.type === 'Buffer' && ArrayIsArray(obj.data)) {
+    return fromArrayLike(obj.data);
+  }
+}
+
+// Static methods
+
+Buffer.isBuffer = function isBuffer(b) {
+  return b instanceof Buffer;
+};
+
+Buffer.compare = function compare(buf1, buf2) {
+  if (!isUint8Array(buf1)) {
+    throw new ERR_INVALID_ARG_TYPE('buf1', ['Buffer', 'Uint8Array'], buf1);
+  }
+
+  if (!isUint8Array(buf2)) {
+    throw new ERR_INVALID_ARG_TYPE('buf2', ['Buffer', 'Uint8Array'], buf2);
+  }
+
+  if (buf1 === buf2) {
+    return 0;
+  }
+
+  return _compare(buf1, buf2);
+};
+
+Buffer.isEncoding = function isEncoding(encoding) {
+  return typeof encoding === 'string' && encoding.length !== 0 &&
+         normalizeEncoding(encoding) !== undefined;
+};
+Buffer[kIsEncodingSymbol] = Buffer.isEncoding;
+
+Buffer.concat = function concat(list, length) {
+  validateArray(list, 'list');
+
+  if (list.length === 0)
+    return new FastBuffer();
+
+  // When no explicit length is given, sum the element lengths.
+  if (length === undefined) {
+    length = 0;
+    for (let i = 0; i < list.length; i++) {
+      if (list[i].length) {
+        length += list[i].length;
+      }
+    }
+  } else {
+    validateOffset(length, 'length');
+  }
+
+  const buffer = Buffer.allocUnsafe(length);
+  let pos = 0;
+  for (let i = 0; i < list.length; i++) {
+    const buf = list[i];
+    if (!isUint8Array(buf)) {
+      // TODO(BridgeAR): This should not be of type ERR_INVALID_ARG_TYPE.
+      // Instead, find the proper error code for this.
+      throw new ERR_INVALID_ARG_TYPE(
+        `list[${i}]`, ['Buffer', 'Uint8Array'], list[i]);
+    }
+    pos += _copyActual(buf, buffer, pos, 0, buf.length);
+  }
+
+  // Note: `length` is always equal to `buffer.length` at this point
+  if (pos < length) {
+    // Zero-fill the remaining bytes if the specified `length` was more than
+    // the actual total length, i.e. if we have some remaining allocated bytes
+    // there were not initialized.
+    TypedArrayPrototypeFill(buffer, 0, pos, length);
+  }
+
+  return buffer;
+};
+
+// Decoded byte count of a base64 string of `bytes` characters, accounting
+// for up to two '=' (0x3D) padding characters.
+function base64ByteLength(str, bytes) {
+  // Handle padding
+  if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D)
+    bytes--;
+  if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D)
+    bytes--;
+
+  // Base64 ratio: 3/4
+  return (bytes * 3) >>> 2;
+}
+
+// Per-encoding operation table: byteLength/write/slice/indexOf dispatch
+// used by fromString(), toString(), indexOf() and friends.
+const encodingOps = {
+  utf8: {
+    encoding: 'utf8',
+    encodingVal: encodingsMap.utf8,
+    byteLength: byteLengthUtf8,
+    write: (buf, string, offset, len) => buf.utf8Write(string, offset, len),
+    slice: (buf, start, end) => buf.utf8Slice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfString(buf, val, byteOffset, encodingsMap.utf8, dir),
+  },
+  ucs2: {
+    encoding: 'ucs2',
+    encodingVal: encodingsMap.utf16le,
+    byteLength: (string) => string.length * 2,
+    write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
+    slice: (buf, start, end) => buf.ucs2Slice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir),
+  },
+  utf16le: {
+    encoding: 'utf16le',
+    encodingVal: encodingsMap.utf16le,
+    byteLength: (string) => string.length * 2,
+    write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len),
+    slice: (buf, start, end) => buf.ucs2Slice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir),
+  },
+  latin1: {
+    encoding: 'latin1',
+    encodingVal: encodingsMap.latin1,
+    byteLength: (string) => string.length,
+    write: (buf, string, offset, len) => buf.latin1Write(string, offset, len),
+    slice: (buf, start, end) => buf.latin1Slice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfString(buf, val, byteOffset, encodingsMap.latin1, dir),
+  },
+  ascii: {
+    encoding: 'ascii',
+    encodingVal: encodingsMap.ascii,
+    byteLength: (string) => string.length,
+    write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len),
+    slice: (buf, start, end) => buf.asciiSlice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfBuffer(buf,
+                    fromStringFast(val, encodingOps.ascii),
+                    byteOffset,
+                    encodingsMap.ascii,
+                    dir),
+  },
+  base64: {
+    encoding: 'base64',
+    encodingVal: encodingsMap.base64,
+    byteLength: (string) => base64ByteLength(string, string.length),
+    write: (buf, string, offset, len) => buf.base64Write(string, offset, len),
+    slice: (buf, start, end) => buf.base64Slice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfBuffer(buf,
+                    fromStringFast(val, encodingOps.base64),
+                    byteOffset,
+                    encodingsMap.base64,
+                    dir),
+  },
+  base64url: {
+    encoding: 'base64url',
+    encodingVal: encodingsMap.base64url,
+    byteLength: (string) => base64ByteLength(string, string.length),
+    write: (buf, string, offset, len) =>
+      buf.base64urlWrite(string, offset, len),
+    slice: (buf, start, end) => buf.base64urlSlice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfBuffer(buf,
+                    fromStringFast(val, encodingOps.base64url),
+                    byteOffset,
+                    encodingsMap.base64url,
+                    dir),
+  },
+  hex: {
+    encoding: 'hex',
+    encodingVal: encodingsMap.hex,
+    byteLength: (string) => string.length >>> 1,
+    write: (buf, string, offset, len) => buf.hexWrite(string, offset, len),
+    slice: (buf, start, end) => buf.hexSlice(start, end),
+    indexOf: (buf, val, byteOffset, dir) =>
+      indexOfBuffer(buf,
+                    fromStringFast(val, encodingOps.hex),
+                    byteOffset,
+                    encodingsMap.hex,
+                    dir),
+  },
+};
+// Resolves an encoding name to its encodingOps entry, or undefined when
+// unknown. Dispatches on string length first, checks the common-case exact
+// (lowercase) spelling, then retries after lowercasing.
+function getEncodingOps(encoding) {
+  encoding += '';
+  switch (encoding.length) {
+    case 4:
+      if (encoding === 'utf8') return encodingOps.utf8;
+      if (encoding === 'ucs2') return encodingOps.ucs2;
+      encoding = StringPrototypeToLowerCase(encoding);
+      if (encoding === 'utf8') return encodingOps.utf8;
+      if (encoding === 'ucs2') return encodingOps.ucs2;
+      break;
+    case 5:
+      if (encoding === 'utf-8') return encodingOps.utf8;
+      if (encoding === 'ascii') return encodingOps.ascii;
+      if (encoding === 'ucs-2') return encodingOps.ucs2;
+      encoding = StringPrototypeToLowerCase(encoding);
+      if (encoding === 'utf-8') return encodingOps.utf8;
+      if (encoding === 'ascii') return encodingOps.ascii;
+      if (encoding === 'ucs-2') return encodingOps.ucs2;
+      break;
+    case 7:
+      if (encoding === 'utf16le' ||
+          StringPrototypeToLowerCase(encoding) === 'utf16le')
+        return encodingOps.utf16le;
+      break;
+    case 8:
+      if (encoding === 'utf-16le' ||
+          StringPrototypeToLowerCase(encoding) === 'utf-16le')
+        return encodingOps.utf16le;
+      break;
+    case 6:
+      if (encoding === 'latin1' || encoding === 'binary')
+        return encodingOps.latin1;
+      if (encoding === 'base64') return encodingOps.base64;
+      encoding = StringPrototypeToLowerCase(encoding);
+      if (encoding === 'latin1' || encoding === 'binary')
+        return encodingOps.latin1;
+      if (encoding === 'base64') return encodingOps.base64;
+      break;
+    case 3:
+      if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex')
+        return encodingOps.hex;
+      break;
+    case 9:
+      if (encoding === 'base64url' ||
+          StringPrototypeToLowerCase(encoding) === 'base64url')
+        return encodingOps.base64url;
+      break;
+  }
+}
+
+// Byte length of `string` in the given encoding; for buffer-like inputs the
+// byteLength property is returned directly.
+function byteLength(string, encoding) {
+  if (typeof string !== 'string') {
+    if (isArrayBufferView(string) || isAnyArrayBuffer(string)) {
+      return string.byteLength;
+    }
+
+    throw new ERR_INVALID_ARG_TYPE(
+      'string', ['string', 'Buffer', 'ArrayBuffer'], string,
+    );
+  }
+
+  const len = string.length;
+  if (len === 0)
+    return 0;
+
+  if (!encoding || encoding === 'utf8') {
+    return byteLengthUtf8(string);
+  }
+
+  if (encoding === 'ascii') {
+    return len;
+  }
+
+  const ops = getEncodingOps(encoding);
+  if (ops === undefined) {
+    // TODO (ronag): Makes more sense to throw here.
+    // throw new ERR_UNKNOWN_ENCODING(encoding);
+    return byteLengthUtf8(string);
+  }
+
+  return ops.byteLength(string);
+}
+
+Buffer.byteLength = byteLength;
+
+// For backwards compatibility.
+// Legacy accessor: the backing ArrayBuffer (undefined on non-Buffer `this`).
+ObjectDefineProperty(Buffer.prototype, 'parent', {
+  __proto__: null,
+  enumerable: true,
+  get() {
+    if (!(this instanceof Buffer))
+      return undefined;
+    return this.buffer;
+  },
+});
+// Legacy accessor: the view's byteOffset (undefined on non-Buffer `this`).
+ObjectDefineProperty(Buffer.prototype, 'offset', {
+  __proto__: null,
+  enumerable: true,
+  get() {
+    if (!(this instanceof Buffer))
+      return undefined;
+    return this.byteOffset;
+  },
+});
+
+Buffer.prototype.copy =
+  function copy(target, targetStart, sourceStart, sourceEnd) {
+    return copyImpl(this, target, targetStart, sourceStart, sourceEnd);
+  };
+
+// No need to verify that "buf.length <= MAX_UINT32" since it's a read-only
+// property of a typed array.
+// This behaves neither like String nor Uint8Array in that we set start/end
+// to their upper/lower bounds if the value passed is out of range.
+Buffer.prototype.toString = function toString(encoding, start, end) {
+  if (arguments.length === 0) {
+    return this.utf8Slice(0, this.length);
+  }
+
+  const len = this.length;
+
+  if (start <= 0)
+    start = 0;
+  else if (start >= len)
+    return '';
+  else
+    start = MathTrunc(start) || 0;
+
+  if (end === undefined || end > len)
+    end = len;
+  else
+    end = MathTrunc(end) || 0;
+
+  if (end <= start)
+    return '';
+
+  if (encoding === undefined)
+    return this.utf8Slice(start, end);
+
+  const ops = getEncodingOps(encoding);
+  if (ops === undefined)
+    throw new ERR_UNKNOWN_ENCODING(encoding);
+
+  return ops.slice(this, start, end);
+};
+
+Buffer.prototype.equals = function equals(otherBuffer) {
+  if (!isUint8Array(otherBuffer)) {
+    throw new ERR_INVALID_ARG_TYPE(
+      'otherBuffer', ['Buffer', 'Uint8Array'], otherBuffer);
+  }
+
+  if (this === otherBuffer)
+    return true;
+  const len = TypedArrayPrototypeGetByteLength(this);
+  if (len !== TypedArrayPrototypeGetByteLength(otherBuffer))
+    return false;
+
+  return len === 0 || _compare(this, otherBuffer) === 0;
+};
+
+let INSPECT_MAX_BYTES = 50;
+// Override how buffers are presented by util.inspect().
+Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) {
+  const max = INSPECT_MAX_BYTES;
+  const actualMax = MathMin(max, this.length);
+  const remaining = this.length - max;
+  // Hex-dump the first `actualMax` bytes as space-separated byte pairs.
+  let str = StringPrototypeTrim(RegExpPrototypeSymbolReplace(
+    /(.{2})/g, this.hexSlice(0, actualMax), '$1 '));
+  if (remaining > 0)
+    str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`;
+  // Inspect special properties as well, if possible.
+  if (ctx) {
+    let extras = false;
+    const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE;
+    const obj = { __proto__: null };
+    ArrayPrototypeForEach(getOwnNonIndexProperties(this, filter),
+                          (key) => {
+                            extras = true;
+                            obj[key] = this[key];
+                          });
+    if (extras) {
+      if (this.length !== 0)
+        str += ', ';
+      // '[Object: null prototype] {'.length === 26
+      // This is guarded with a test.
+      str += StringPrototypeSlice(utilInspect(obj, {
+        ...ctx,
+        breakLength: Infinity,
+        compact: true,
+      }), 27, -2);
+    }
+  }
+  let constructorName = 'Buffer';
+  try {
+    const { constructor } = this;
+    if (typeof constructor === 'function' && ObjectPrototypeHasOwnProperty(constructor, 'name')) {
+      constructorName = constructor.name;
+    }
+  } catch { /* Ignore error and use default name */ }
+  return `<${constructorName} ${str}>`;
+};
+// Legacy alias for the custom-inspect hook.
+Buffer.prototype.inspect = Buffer.prototype[customInspectSymbol];
+
+Buffer.prototype.compare = function compare(target,
+                                            targetStart,
+                                            targetEnd,
+                                            sourceStart,
+                                            sourceEnd) {
+  if (!isUint8Array(target)) {
+    throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target);
+  }
+  if (arguments.length === 1)
+    return _compare(this, target);
+
+  if (targetStart === undefined)
+    targetStart = 0;
+  else
+    validateOffset(targetStart, 'targetStart');
+
+  if (targetEnd === undefined)
+    targetEnd = target.length;
+  else
+    validateOffset(targetEnd, 'targetEnd', 0, target.length);
+
+  if (sourceStart === undefined)
+    sourceStart = 0;
+  else
+    validateOffset(sourceStart, 'sourceStart');
+
+  if (sourceEnd === undefined)
+    sourceEnd = this.length;
+  else
+    validateOffset(sourceEnd, 'sourceEnd', 0, this.length);
+
+  // Empty-range conventions: empty source sorts before a non-empty target,
+  // after an equally-empty one; empty target sorts after a non-empty source.
+  if (sourceStart >= sourceEnd)
+    return (targetStart >= targetEnd ? 0 : -1);
+  if (targetStart >= targetEnd)
+    return 1;
+
+  return compareOffset(this, target, targetStart, sourceStart, targetEnd,
+                       sourceEnd);
+};
+
+// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
+// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
+//
+// Arguments:
+// - buffer - a Buffer to search
+// - val - a string, Buffer, or number
+// - byteOffset - an index into `buffer`; will be clamped to an int32
+// - encoding - an optional encoding, relevant if val is a string
+// - dir - true for indexOf, false for lastIndexOf
+function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) {
+  validateBuffer(buffer);
+
+  if (typeof byteOffset === 'string') {
+    encoding = byteOffset;
+    byteOffset = undefined;
+  } else if (byteOffset > 0x7fffffff) {
+    byteOffset = 0x7fffffff;
+  } else if (byteOffset < -0x80000000) {
+    byteOffset = -0x80000000;
+  }
+  // Coerce to Number. Values like null and [] become 0.
+  byteOffset = +byteOffset;
+  // If the offset is undefined, "foo", {}, coerces to NaN, search whole buffer.
+  if (NumberIsNaN(byteOffset)) {
+    byteOffset = dir ? 0 : (buffer.length || buffer.byteLength);
+  }
+  dir = !!dir;  // Cast to bool.
+
+  if (typeof val === 'number')
+    return indexOfNumber(buffer, val >>> 0, byteOffset, dir);
+
+  let ops;
+  if (encoding === undefined)
+    ops = encodingOps.utf8;
+  else
+    ops = getEncodingOps(encoding);
+
+  if (typeof val === 'string') {
+    if (ops === undefined)
+      throw new ERR_UNKNOWN_ENCODING(encoding);
+    return ops.indexOf(buffer, val, byteOffset, dir);
+  }
+
+  if (isUint8Array(val)) {
+    const encodingVal =
+      (ops === undefined ? encodingsMap.utf8 : ops.encodingVal);
+    return indexOfBuffer(buffer, val, byteOffset, encodingVal, dir);
+  }
+
+  throw new ERR_INVALID_ARG_TYPE(
+    'value', ['number', 'string', 'Buffer', 'Uint8Array'], val,
+  );
+}
+
+Buffer.prototype.indexOf = function indexOf(val, byteOffset, encoding) {
+  return bidirectionalIndexOf(this, val, byteOffset, encoding, true);
+};
+
+Buffer.prototype.lastIndexOf = function lastIndexOf(val, byteOffset, encoding) {
+  return bidirectionalIndexOf(this, val, byteOffset, encoding, false);
+};
+
+Buffer.prototype.includes = function includes(val, byteOffset, encoding) {
+  return this.indexOf(val, byteOffset, encoding) !== -1;
+};
+
+// Usage:
+//    buffer.fill(number[, offset[, end]])
+//    buffer.fill(buffer[, offset[, end]])
+//    buffer.fill(string[, offset[, end]][, encoding])
+Buffer.prototype.fill = function fill(value, offset, end, encoding) {
+  return _fill(this, value, offset, end, encoding);
+};
+
+function _fill(buf, value, offset, end, encoding) {
+  if (typeof value === 'string') {
+    // Disambiguate the optional-argument overloads: a string in the offset
+    // or end position is actually the encoding.
+    if (offset === undefined || typeof offset === 'string') {
+      encoding = offset;
+      offset = 0;
+      end = buf.length;
+    } else if (typeof end === 'string') {
+      encoding = end;
+      end = buf.length;
+    }
+
+    const normalizedEncoding = normalizeEncoding(encoding);
+    if (normalizedEncoding === undefined) {
+      validateString(encoding, 'encoding');
+      throw new ERR_UNKNOWN_ENCODING(encoding);
+    }
+
+    if (value.length === 0) {
+      // If value === '' default to zero.
+      value = 0;
+    } else if (value.length === 1) {
+      // Fast path: If `value` fits into a single byte, use that numeric value.
+      if (normalizedEncoding === 'utf8') {
+        const code = StringPrototypeCharCodeAt(value, 0);
+        if (code < 128) {
+          value = code;
+        }
+      } else if (normalizedEncoding === 'latin1') {
+        value = StringPrototypeCharCodeAt(value, 0);
+      }
+    }
+  } else {
+    encoding = undefined;
+  }
+
+  if (offset === undefined) {
+    offset = 0;
+    end = buf.length;
+  } else {
+    validateOffset(offset, 'offset');
+    // Invalid ranges are not set to a default, so can range check early.
+    if (end === undefined) {
+      end = buf.length;
+    } else {
+      validateOffset(end, 'end', 0, buf.length);
+    }
+    if (offset >= end)
+      return buf;
+  }
+
+
+  if (typeof value === 'number') {
+    // OOB check
+    const byteLen = TypedArrayPrototypeGetByteLength(buf);
+    const fillLength = end - offset;
+    if (offset > end || fillLength + offset > byteLen)
+      throw new ERR_BUFFER_OUT_OF_BOUNDS();
+
+    TypedArrayPrototypeFill(buf, value, offset, end);
+  } else {
+    const res = bindingFill(buf, value, offset, end, encoding);
+    if (res < 0) {
+      if (res === -1)
+        throw new ERR_INVALID_ARG_VALUE('value', value);
+      throw new ERR_BUFFER_OUT_OF_BOUNDS();
+    }
+  }
+
+  return buf;
+}
+
+Buffer.prototype.write = function write(string, offset, length, encoding) {
+  // Buffer#write(string);
+  if (offset === undefined) {
+    return this.utf8Write(string, 0, this.length);
+  }
+  // Buffer#write(string, encoding)
+  if (length === undefined && typeof offset === 'string') {
+    encoding = offset;
+    length = this.length;
+    offset = 0;
+
+  // Buffer#write(string, offset[, length][, encoding])
+  } else {
+    validateOffset(offset, 'offset', 0, this.length);
+
+    const remaining = this.length - offset;
+
+    if (length === undefined) {
+      length = remaining;
+    } else if (typeof length === 'string') {
+      encoding = length;
+      length = remaining;
+    } else {
+      validateOffset(length, 'length', 0, this.length);
+      if (length > remaining)
+        length = remaining;
+    }
+  }
+
+  if (!encoding || encoding === 'utf8')
+    return this.utf8Write(string, offset, length);
+  if (encoding
=== 'ascii') + return this.asciiWrite(string, offset, length); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.write(this, string, offset, length); +}; + +Buffer.prototype.toJSON = function toJSON() { + if (this.length > 0) { + const data = new Array(this.length); + for (let i = 0; i < this.length; ++i) + data[i] = this[i]; + return { type: 'Buffer', data }; + } + return { type: 'Buffer', data: [] }; +}; + +function adjustOffset(offset, length) { + // Use Math.trunc() to convert offset to an integer value that can be larger + // than an Int32. Hence, don't use offset | 0 or similar techniques. + offset = MathTrunc(offset); + if (offset === 0) { + return 0; + } + if (offset < 0) { + offset += length; + return offset > 0 ? offset : 0; + } + if (offset < length) { + return offset; + } + return NumberIsNaN(offset) ? 0 : length; +} + +Buffer.prototype.subarray = function subarray(start, end) { + const srcLength = this.length; + start = adjustOffset(start, srcLength); + end = end !== undefined ? adjustOffset(end, srcLength) : srcLength; + const newLength = end > start ? end - start : 0; + return new FastBuffer(this.buffer, this.byteOffset + start, newLength); +}; + +Buffer.prototype.slice = function slice(start, end) { + return this.subarray(start, end); +}; + +function swap(b, n, m) { + const i = b[n]; + b[n] = b[m]; + b[m] = i; +} + +Buffer.prototype.swap16 = function swap16() { + // For Buffer.length < 128, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 2 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('16-bits'); + if (len < 128) { + for (let i = 0; i < len; i += 2) + swap(this, i, i + 1); + return this; + } + return _swap16(this); +}; + +Buffer.prototype.swap32 = function swap32() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. 
For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 4 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('32-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 4) { + swap(this, i, i + 3); + swap(this, i + 1, i + 2); + } + return this; + } + return _swap32(this); +}; + +Buffer.prototype.swap64 = function swap64() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 8 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('64-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 8) { + swap(this, i, i + 7); + swap(this, i + 1, i + 6); + swap(this, i + 2, i + 5); + swap(this, i + 3, i + 4); + } + return this; + } + return _swap64(this); +}; + +Buffer.prototype.toLocaleString = Buffer.prototype.toString; + +let transcode; +if (internalBinding('config').hasIntl) { + const { + icuErrName, + transcode: _transcode, + } = internalBinding('icu'); + + // Transcodes the Buffer from one encoding to another, returning a new + // Buffer instance. + transcode = function transcode(source, fromEncoding, toEncoding) { + if (!isUint8Array(source)) { + throw new ERR_INVALID_ARG_TYPE('source', + ['Buffer', 'Uint8Array'], source); + } + if (source.length === 0) return Buffer.alloc(0); + + fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding; + toEncoding = normalizeEncoding(toEncoding) || toEncoding; + const result = _transcode(source, fromEncoding, toEncoding); + if (typeof result !== 'number') + return result; + + const code = icuErrName(result); + const err = genericNodeError( + `Unable to transcode Buffer [${code}]`, + { code: code, errno: result }, + ); + throw err; + }; +} + +function btoa(input) { + // The implementation here has not been performance optimized in any way and + // should not be. 
+ // Refs: https://github.com/nodejs/node/pull/38433#issuecomment-828426932 + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + const result = _btoa(`${input}`); + if (result === -1) { + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + } + return result; +} + +function atob(input) { + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + + const result = _atob(`${input}`); + + switch (result) { + case -2: // Invalid character + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + case -1: // Single character remained + throw lazyDOMException( + 'The string to be decoded is not correctly encoded.', + 'InvalidCharacterError'); + case -3: // Possible overflow + // TODO(@anonrig): Throw correct error in here. + throw lazyDOMException('The input causes overflow.', 'InvalidCharacterError'); + default: + return result; + } +} + +function isUtf8(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsUtf8(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +function isAscii(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsAscii(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +module.exports = { + Buffer, + SlowBuffer: deprecate( + SlowBuffer, + 'SlowBuffer() is deprecated. 
Please use Buffer.allocUnsafeSlow()', + 'DEP0030'), + transcode, + isUtf8, + isAscii, + + // Legacy + kMaxLength, + kStringMaxLength, + btoa, + atob, +}; + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + INSPECT_MAX_BYTES: { + __proto__: null, + configurable: true, + enumerable: true, + get() { return INSPECT_MAX_BYTES; }, + set(val) { + validateNumber(val, 'INSPECT_MAX_BYTES', 0); + INSPECT_MAX_BYTES = val; + }, + }, +}); + +defineLazyProperties( + module.exports, + 'internal/blob', + ['Blob', 'resolveObjectURL'], +); +defineLazyProperties( + module.exports, + 'internal/file', + ['File'], +); \ No newline at end of file diff --git a/node/dfn.js b/node/dfn.js new file mode 100644 index 00000000..29b453dc --- /dev/null +++ b/node/dfn.js @@ -0,0 +1,118 @@ +var dfnMapTarget = -1; +var dfnMapDone = 0; +var dfnMap = {}; +document.addEventListener('DOMContentLoaded', function (event) { + var links = []; + dfnMapTarget = document.links.length; + for (var i = 0; i < dfnMapTarget; i += 1) + links[i] = document.links[i]; + var inc = 100; + for (var i = 0; i < dfnMapTarget; i += inc) { + setTimeout(function (j) { + for (var k = j; k < j+inc && k < dfnMapTarget; k += 1) { + if (links[k].href.indexOf('#') >= 0) { + if (links[k].className != "no-backref" && + links[k].parentNode.className != "no-backref") { + var s = links[k].href.substr(links[k].href.indexOf('#') + 1); + if (!(s in dfnMap)) + dfnMap[s] = []; + dfnMap[s].push(links[k]); + } + } + dfnMapDone += 1; + } + }, 0, i); + } + document.body.className += " dfnEnabled"; +}, false); + +var dfnPanel; +var dfnUniqueId = 0; +var dfnTimeout; +document.addEventListener('click', dfnShow, false); +function dfnShow(event) { + if (dfnTimeout) { + clearTimeout(dfnTimeout); + dfnTimeout = null; + } + if (dfnPanel) { + dfnPanel.parentNode.removeChild(dfnPanel); + dfnPanel = null; + } + if (dfnMapDone == dfnMapTarget) { + var node = 
event.target; + while (node && (node.nodeType != event.target.ELEMENT_NODE || node.tagName != "DFN")) + node = node.parentNode; + if (node) { + var panel = document.createElement('div'); + panel.className = 'dfnPanel'; + if (node.id) { + var permalinkP = document.createElement('p'); + var permalinkA = document.createElement('a'); + permalinkA.href = '#' + node.id; + permalinkA.textContent = '#' + node.id; + permalinkP.appendChild(permalinkA); + panel.appendChild(permalinkP); + } + var p = document.createElement('p'); + panel.appendChild(p); + if (node.id in dfnMap || node.parentNode.id in dfnMap) { + p.textContent = 'Referenced in:'; + var ul = document.createElement('ul'); + var lastHeader; + var lastLi; + var n; + var sourceLinks = []; + if (node.id in dfnMap) + for (var i = 0; i < dfnMap[node.id].length; i += 1) + sourceLinks.push(dfnMap[node.id][i]); + if (node.parentNode.id in dfnMap) + for (var i = 0; i < dfnMap[node.parentNode.id].length; i += 1) + sourceLinks.push(dfnMap[node.parentNode.id][i]); + for (var i = 0; i < sourceLinks.length; i += 1) { + var link = sourceLinks[i]; + var header = dfnGetCaption(link); + var a = document.createElement('a'); + if (!link.id) + link.id = 'dfnReturnLink-' + dfnUniqueId++; + a.href = '#' + link.id; + if (header != lastHeader) { + lastHeader = header; + n = 1; + var li = document.createElement('li'); + var cloneHeader = header.cloneNode(true); + while (cloneHeader.hasChildNodes()) + if (cloneHeader.firstChild.className == 'section-link') + cloneHeader.removeChild(cloneHeader.firstChild); + else + a.appendChild(cloneHeader.firstChild); + lastLi = li; + li.appendChild(a); + ul.appendChild(li); + } else { + n += 1; + a.appendChild(document.createTextNode('(' + n + ')')); + lastLi.appendChild(document.createTextNode(' ')); + lastLi.appendChild(a); + } + } + panel.appendChild(ul); + } else { + p.textContent = 'No references in this file.'; + } + node.appendChild(panel); + dfnPanel = panel; + } + } else { + dfnTimeout = 
setTimeout(dfnShow, 250, event); + } +} + +function dfnGetCaption(link) { + var node = link; + while (node && !(node.parentNode.tagName == "DIV" && node.parentNode.className == "section")) + node = node.parentNode; + while (node && (node.nodeType != node.ELEMENT_NODE || !node.tagName.match(/^H[1-6]$/))) + node = node.previousSibling; + return node; +} \ No newline at end of file diff --git a/node/dgram.js b/node/dgram.js new file mode 100644 index 00000000..c77ea89c --- /dev/null +++ b/node/dgram.js @@ -0,0 +1,1111 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + ArrayPrototypePush, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperty, + ObjectSetPrototypeOf, + ReflectApply, + SymbolAsyncDispose, + SymbolDispose, +} = primordials; + +const { + ErrnoException, + ExceptionWithHostPort, + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_FD_TYPE, + ERR_IP_BLOCKED, + ERR_MISSING_ARGS, + ERR_SOCKET_ALREADY_BOUND, + ERR_SOCKET_BAD_BUFFER_SIZE, + ERR_SOCKET_BUFFER_SIZE, + ERR_SOCKET_DGRAM_IS_CONNECTED, + ERR_SOCKET_DGRAM_NOT_CONNECTED, + ERR_SOCKET_DGRAM_NOT_RUNNING, + }, +} = require('internal/errors'); +const { + kStateSymbol, + _createSocketHandle, + newHandle, +} = require('internal/dgram'); +const { isIP } = require('internal/net'); +const { + isInt32, + validateAbortSignal, + validateString, + validateNumber, + validatePort, + validateUint32, +} = require('internal/validators'); +const { Buffer } = require('buffer'); +const { deprecate, guessHandleType, promisify } = require('internal/util'); +const { isArrayBufferView } = require('internal/util/types'); +const EventEmitter = require('events'); +const { addAbortListener } = require('internal/events/abort_listener'); +const { + defaultTriggerAsyncIdScope, + symbols: { async_id_symbol, owner_symbol }, +} = require('internal/async_hooks'); +const { UV_UDP_REUSEADDR } = internalBinding('constants').os; + +const { + constants: { UV_UDP_IPV6ONLY, UV_UDP_REUSEPORT }, + 
UDP, + SendWrap, +} = internalBinding('udp_wrap'); + +const dc = require('diagnostics_channel'); +const udpSocketChannel = dc.channel('udp.socket'); + +const BIND_STATE_UNBOUND = 0; +const BIND_STATE_BINDING = 1; +const BIND_STATE_BOUND = 2; + +const CONNECT_STATE_DISCONNECTED = 0; +const CONNECT_STATE_CONNECTING = 1; +const CONNECT_STATE_CONNECTED = 2; + +const RECV_BUFFER = true; +const SEND_BUFFER = false; + +// Lazily loaded +let _cluster = null; +function lazyLoadCluster() { + return _cluster ??= require('cluster'); +} +let _blockList = null; +function lazyLoadBlockList() { + return _blockList ??= require('internal/blocklist').BlockList; +} + +function Socket(type, listener) { + FunctionPrototypeCall(EventEmitter, this); + let lookup; + let recvBufferSize; + let sendBufferSize; + let receiveBlockList; + let sendBlockList; + + let options; + if (type !== null && typeof type === 'object') { + options = type; + type = options.type; + lookup = options.lookup; + if (options.recvBufferSize) { + validateUint32(options.recvBufferSize, 'options.recvBufferSize'); + } + if (options.sendBufferSize) { + validateUint32(options.sendBufferSize, 'options.sendBufferSize'); + } + recvBufferSize = options.recvBufferSize; + sendBufferSize = options.sendBufferSize; + if (options.receiveBlockList) { + if (!lazyLoadBlockList().isBlockList(options.receiveBlockList)) { + throw new ERR_INVALID_ARG_TYPE('options.receiveBlockList', 'net.BlockList', options.receiveBlockList); + } + receiveBlockList = options.receiveBlockList; + } + if (options.sendBlockList) { + if (!lazyLoadBlockList().isBlockList(options.sendBlockList)) { + throw new ERR_INVALID_ARG_TYPE('options.sendBlockList', 'net.BlockList', options.sendBlockList); + } + sendBlockList = options.sendBlockList; + } + } + + const handle = newHandle(type, lookup); + handle[owner_symbol] = this; + + this[async_id_symbol] = handle.getAsyncId(); + this.type = type; + + if (typeof listener === 'function') + this.on('message', listener); + + 
this[kStateSymbol] = { + handle, + receiving: false, + bindState: BIND_STATE_UNBOUND, + connectState: CONNECT_STATE_DISCONNECTED, + queue: undefined, + reuseAddr: options?.reuseAddr, // Use UV_UDP_REUSEADDR if true. + reusePort: options?.reusePort, + ipv6Only: options?.ipv6Only, + recvBufferSize, + sendBufferSize, + receiveBlockList, + sendBlockList, + }; + + if (options?.signal !== undefined) { + const { signal } = options; + validateAbortSignal(signal, 'options.signal'); + const onAborted = () => { + if (this[kStateSymbol].handle) this.close(); + }; + if (signal.aborted) { + onAborted(); + } else { + const disposable = addAbortListener(signal, onAborted); + this.once('close', disposable[SymbolDispose]); + } + } + if (udpSocketChannel.hasSubscribers) { + udpSocketChannel.publish({ + socket: this, + }); + } +} +ObjectSetPrototypeOf(Socket.prototype, EventEmitter.prototype); +ObjectSetPrototypeOf(Socket, EventEmitter); + + +function createSocket(type, listener) { + return new Socket(type, listener); +} + + +function startListening(socket) { + const state = socket[kStateSymbol]; + + state.handle.onmessage = onMessage; + state.handle.onerror = onError; + state.handle.recvStart(); + state.receiving = true; + state.bindState = BIND_STATE_BOUND; + + if (state.recvBufferSize) + bufferSize(socket, state.recvBufferSize, RECV_BUFFER); + + if (state.sendBufferSize) + bufferSize(socket, state.sendBufferSize, SEND_BUFFER); + + socket.emit('listening'); +} + +function replaceHandle(self, newHandle) { + const state = self[kStateSymbol]; + const oldHandle = state.handle; + // Sync the old handle state to new handle + if (!oldHandle.hasRef() && typeof newHandle.unref === 'function') { + newHandle.unref(); + } + // Set up the handle that we got from primary. + newHandle.lookup = oldHandle.lookup; + newHandle.bind = oldHandle.bind; + newHandle.send = oldHandle.send; + newHandle[owner_symbol] = self; + + // Replace the existing handle by the handle we got from primary. 
+ oldHandle.close(); + state.handle = newHandle; +} + +function bufferSize(self, size, buffer) { + if (size >>> 0 !== size) + throw new ERR_SOCKET_BAD_BUFFER_SIZE(); + + const ctx = {}; + const ret = self[kStateSymbol].handle.bufferSize(size, buffer, ctx); + if (ret === undefined) { + throw new ERR_SOCKET_BUFFER_SIZE(ctx); + } + return ret; +} + +// Query primary process to get the server handle and utilize it. +function bindServerHandle(self, options, errCb) { + const cluster = lazyLoadCluster(); + + const state = self[kStateSymbol]; + cluster._getServer(self, options, (err, handle) => { + if (err) { + // Do not call callback if socket is closed + if (state.handle) { + errCb(err); + } + return; + } + + if (!state.handle) { + // Handle has been closed in the mean time. + return handle.close(); + } + + replaceHandle(self, handle); + startListening(self); + }); +} + +Socket.prototype.bind = function(port_, address_ /* , callback */) { + let port = port_; + + healthCheck(this); + const state = this[kStateSymbol]; + + if (state.bindState !== BIND_STATE_UNBOUND) + throw new ERR_SOCKET_ALREADY_BOUND(); + + state.bindState = BIND_STATE_BINDING; + + const cb = arguments.length && arguments[arguments.length - 1]; + if (typeof cb === 'function') { + function removeListeners() { + this.removeListener('error', removeListeners); + this.removeListener('listening', onListening); + } + + function onListening() { + FunctionPrototypeCall(removeListeners, this); + FunctionPrototypeCall(cb, this); + } + + this.on('error', removeListeners); + this.on('listening', onListening); + } + + if (port !== null && + typeof port === 'object' && + typeof port.recvStart === 'function') { + replaceHandle(this, port); + startListening(this); + return this; + } + + // Open an existing fd instead of creating a new one. 
+ if (port !== null && typeof port === 'object' && + isInt32(port.fd) && port.fd > 0) { + const fd = port.fd; + const exclusive = !!port.exclusive; + const state = this[kStateSymbol]; + + const cluster = lazyLoadCluster(); + + if (cluster.isWorker && !exclusive) { + bindServerHandle(this, { + address: null, + port: null, + addressType: this.type, + fd, + flags: null, + }, (err) => { + // Callback to handle error. + const ex = new ErrnoException(err, 'open'); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + }); + return this; + } + + const type = guessHandleType(fd); + if (type !== 'UDP') + throw new ERR_INVALID_FD_TYPE(type); + const err = state.handle.open(fd); + + if (err) + throw new ErrnoException(err, 'open'); + + startListening(this); + return this; + } + + let address; + let exclusive; + + if (port !== null && typeof port === 'object') { + address = port.address || ''; + exclusive = !!port.exclusive; + port = port.port; + } else { + address = typeof address_ === 'function' ? '' : address_; + exclusive = false; + } + + // Defaulting address for bind to all interfaces + if (!address) { + if (this.type === 'udp4') + address = '0.0.0.0'; + else + address = '::'; + } + + // Resolve address first + state.handle.lookup(address, (err, ip) => { + if (!state.handle) + return; // Handle has been closed in the mean time + + if (err) { + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', err); + return; + } + + const cluster = lazyLoadCluster(); + + let flags = 0; + if (state.reuseAddr) + flags |= UV_UDP_REUSEADDR; + if (state.ipv6Only) + flags |= UV_UDP_IPV6ONLY; + if (state.reusePort) { + exclusive = true; + flags |= UV_UDP_REUSEPORT; + } + + if (cluster.isWorker && !exclusive) { + bindServerHandle(this, { + address: ip, + port: port, + addressType: this.type, + fd: -1, + flags: flags, + }, (err) => { + // Callback to handle error. 
+ const ex = new ExceptionWithHostPort(err, 'bind', ip, port); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + }); + } else { + const err = state.handle.bind(ip, port || 0, flags); + if (err) { + const ex = new ExceptionWithHostPort(err, 'bind', ip, port); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + // Todo: close? + return; + } + + startListening(this); + } + }); + + return this; +}; + +Socket.prototype.connect = function(port, address, callback) { + port = validatePort(port, 'Port', false); + if (typeof address === 'function') { + callback = address; + address = ''; + } else if (address === undefined) { + address = ''; + } + + validateString(address, 'address'); + + const state = this[kStateSymbol]; + + if (state.connectState !== CONNECT_STATE_DISCONNECTED) + throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + + state.connectState = CONNECT_STATE_CONNECTING; + if (state.bindState === BIND_STATE_UNBOUND) + this.bind({ port: 0, exclusive: true }, null); + + if (state.bindState !== BIND_STATE_BOUND) { + enqueue(this, FunctionPrototypeBind(_connect, this, + port, address, callback)); + return; + } + + ReflectApply(_connect, this, [port, address, callback]); +}; + + +function _connect(port, address, callback) { + const state = this[kStateSymbol]; + if (callback) + this.once('connect', callback); + + const afterDns = (ex, ip) => { + defaultTriggerAsyncIdScope( + this[async_id_symbol], + doConnect, + ex, this, ip, address, port, callback, + ); + }; + + state.handle.lookup(address, afterDns); +} + + +function doConnect(ex, self, ip, address, port, callback) { + const state = self[kStateSymbol]; + if (!state.handle) + return; + if (!ex && state.sendBlockList?.check(ip, `ipv${isIP(ip)}`)) { + ex = new ERR_IP_BLOCKED(ip); + } + if (!ex) { + const err = state.handle.connect(ip, port); + if (err) { + ex = new ExceptionWithHostPort(err, 'connect', address, port); + } + } + + if (ex) { + state.connectState = CONNECT_STATE_DISCONNECTED; + 
return process.nextTick(() => { + if (callback) { + self.removeListener('connect', callback); + callback(ex); + } else { + self.emit('error', ex); + } + }); + } + + state.connectState = CONNECT_STATE_CONNECTED; + process.nextTick(() => self.emit('connect')); +} + + +Socket.prototype.disconnect = function() { + const state = this[kStateSymbol]; + if (state.connectState !== CONNECT_STATE_CONNECTED) + throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + + const err = state.handle.disconnect(); + if (err) + throw new ErrnoException(err, 'connect'); + else + state.connectState = CONNECT_STATE_DISCONNECTED; +}; + + +// Thin wrapper around `send`, here for compatibility with dgram_legacy.js +Socket.prototype.sendto = function(buffer, + offset, + length, + port, + address, + callback) { + validateNumber(offset, 'offset'); + validateNumber(length, 'length'); + validateNumber(port, 'port'); + validateString(address, 'address'); + + this.send(buffer, offset, length, port, address, callback); +}; + + +function sliceBuffer(buffer, offset, length) { + if (typeof buffer === 'string') { + buffer = Buffer.from(buffer); + } else if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } + + offset = offset >>> 0; + length = length >>> 0; + if (offset > buffer.byteLength) { + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + } + + if (offset + length > buffer.byteLength) { + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } + + return Buffer.from(buffer.buffer, buffer.byteOffset + offset, length); +} + + +function fixBufferList(list) { + const newlist = new Array(list.length); + + for (let i = 0, l = list.length; i < l; i++) { + const buf = list[i]; + if (typeof buf === 'string') + newlist[i] = Buffer.from(buf); + else if (Buffer.isBuffer(buf)) + newlist[i] = buf; + else if (!isArrayBufferView(buf)) + return null; + else + newlist[i] = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + } + + 
return newlist; +} + + +function enqueue(self, toEnqueue) { + const state = self[kStateSymbol]; + + // If the send queue hasn't been initialized yet, do it, and install an + // event handler that flushes the send queue after binding is done. + if (state.queue === undefined) { + state.queue = []; + self.once(EventEmitter.errorMonitor, onListenError); + self.once('listening', onListenSuccess); + } + ArrayPrototypePush(state.queue, toEnqueue); +} + + +function onListenSuccess() { + this.removeListener(EventEmitter.errorMonitor, onListenError); + FunctionPrototypeCall(clearQueue, this); +} + + +function onListenError(err) { + this.removeListener('listening', onListenSuccess); + this[kStateSymbol].queue = undefined; +} + + +function clearQueue() { + const state = this[kStateSymbol]; + const queue = state.queue; + state.queue = undefined; + + // Flush the send queue. + for (const queueEntry of queue) + queueEntry(); +} + +// valid combinations +// For connectionless sockets +// send(buffer, offset, length, port, address, callback) +// send(buffer, offset, length, port, address) +// send(buffer, offset, length, port, callback) +// send(buffer, offset, length, port) +// send(bufferOrList, port, address, callback) +// send(bufferOrList, port, address) +// send(bufferOrList, port, callback) +// send(bufferOrList, port) +// For connected sockets +// send(buffer, offset, length, callback) +// send(buffer, offset, length) +// send(bufferOrList, callback) +// send(bufferOrList) +Socket.prototype.send = function(buffer, + offset, + length, + port, + address, + callback) { + + let list; + const state = this[kStateSymbol]; + const connected = state.connectState === CONNECT_STATE_CONNECTED; + if (!connected) { + if (address || (port && typeof port !== 'function')) { + buffer = sliceBuffer(buffer, offset, length); + } else { + callback = port; + port = offset; + address = length; + } + } else { + if (typeof length === 'number') { + buffer = sliceBuffer(buffer, offset, length); + if 
(typeof port === 'function') { + callback = port; + port = null; + } + } else { + callback = offset; + } + + if (port || address) + throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + } + + if (!ArrayIsArray(buffer)) { + if (typeof buffer === 'string') { + list = [ Buffer.from(buffer) ]; + } else if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } else { + list = [ buffer ]; + } + } else if (!(list = fixBufferList(buffer))) { + throw new ERR_INVALID_ARG_TYPE('buffer list arguments', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } + + if (!connected) + port = validatePort(port, 'Port', false); + + // Normalize callback so it's either a function or undefined but not anything + // else. + if (typeof callback !== 'function') + callback = undefined; + + if (typeof address === 'function') { + callback = address; + address = undefined; + } else if (address != null) { + validateString(address, 'address'); + } + + healthCheck(this); + + if (state.bindState === BIND_STATE_UNBOUND) + this.bind({ port: 0, exclusive: true }, null); + + if (list.length === 0) + ArrayPrototypePush(list, Buffer.alloc(0)); + + // If the socket hasn't been bound yet, push the outbound packet onto the + // send queue and send after binding is complete. 
+ if (state.bindState !== BIND_STATE_BOUND) { + enqueue(this, FunctionPrototypeBind(this.send, this, + list, port, address, callback)); + return; + } + + const afterDns = (ex, ip) => { + defaultTriggerAsyncIdScope( + this[async_id_symbol], + doSend, + ex, this, ip, list, address, port, callback, + ); + }; + + if (!connected) { + state.handle.lookup(address, afterDns); + } else { + afterDns(null, null); + } +}; + +function doSend(ex, self, ip, list, address, port, callback) { + const state = self[kStateSymbol]; + + if (ex) { + if (typeof callback === 'function') { + process.nextTick(callback, ex); + return; + } + + process.nextTick(() => self.emit('error', ex)); + return; + } else if (!state.handle) { + return; + } + + if (ip && state.sendBlockList?.check(ip, `ipv${isIP(ip)}`)) { + if (callback) { + process.nextTick(callback, new ERR_IP_BLOCKED(ip)); + } + return; + } + + const req = new SendWrap(); + req.list = list; // Keep reference alive. + req.address = address; + req.port = port; + if (callback) { + req.callback = callback; + req.oncomplete = afterSend; + } + + let err; + if (port) + err = state.handle.send(req, list, list.length, port, ip, !!callback); + else + err = state.handle.send(req, list, list.length, !!callback); + + if (err >= 1) { + // Synchronous finish. The return code is msg_length + 1 so that we can + // distinguish between synchronous success and asynchronous success. 
+ if (callback) + process.nextTick(callback, null, err - 1); + return; + } + + if (err && callback) { + // Don't emit as error, dgram_legacy.js compatibility + const ex = new ExceptionWithHostPort(err, 'send', address, port); + process.nextTick(callback, ex); + } +} + +function afterSend(err, sent) { + if (err) { + err = new ExceptionWithHostPort(err, 'send', this.address, this.port); + } else { + err = null; + } + + this.callback(err, sent); +} + +Socket.prototype.close = function(callback) { + const state = this[kStateSymbol]; + const queue = state.queue; + + if (typeof callback === 'function') + this.on('close', callback); + + if (queue !== undefined) { + ArrayPrototypePush(queue, FunctionPrototypeBind(this.close, this)); + return this; + } + + healthCheck(this); + stopReceiving(this); + state.handle.close(); + state.handle = null; + defaultTriggerAsyncIdScope(this[async_id_symbol], + process.nextTick, + socketCloseNT, + this); + + return this; +}; + +Socket.prototype[SymbolAsyncDispose] = async function() { + if (!this[kStateSymbol].handle) { + return; + } + await FunctionPrototypeCall(promisify(this.close), this); +}; + + +function socketCloseNT(self) { + self.emit('close'); +} + + +Socket.prototype.address = function() { + healthCheck(this); + + const out = {}; + const err = this[kStateSymbol].handle.getsockname(out); + if (err) { + throw new ErrnoException(err, 'getsockname'); + } + + return out; +}; + +Socket.prototype.remoteAddress = function() { + healthCheck(this); + + const state = this[kStateSymbol]; + if (state.connectState !== CONNECT_STATE_CONNECTED) + throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + + const out = {}; + const err = state.handle.getpeername(out); + if (err) + throw new ErrnoException(err, 'getpeername'); + + return out; +}; + + +Socket.prototype.setBroadcast = function(arg) { + const err = this[kStateSymbol].handle.setBroadcast(arg ? 
1 : 0); + if (err) { + throw new ErrnoException(err, 'setBroadcast'); + } +}; + + +Socket.prototype.setTTL = function(ttl) { + validateNumber(ttl, 'ttl'); + + const err = this[kStateSymbol].handle.setTTL(ttl); + if (err) { + throw new ErrnoException(err, 'setTTL'); + } + + return ttl; +}; + + +Socket.prototype.setMulticastTTL = function(ttl) { + validateNumber(ttl, 'ttl'); + + const err = this[kStateSymbol].handle.setMulticastTTL(ttl); + if (err) { + throw new ErrnoException(err, 'setMulticastTTL'); + } + + return ttl; +}; + + +Socket.prototype.setMulticastLoopback = function(arg) { + const err = this[kStateSymbol].handle.setMulticastLoopback(arg ? 1 : 0); + if (err) { + throw new ErrnoException(err, 'setMulticastLoopback'); + } + + return arg; // 0.4 compatibility +}; + + +Socket.prototype.setMulticastInterface = function(interfaceAddress) { + healthCheck(this); + validateString(interfaceAddress, 'interfaceAddress'); + + const err = this[kStateSymbol].handle.setMulticastInterface(interfaceAddress); + if (err) { + throw new ErrnoException(err, 'setMulticastInterface'); + } +}; + +Socket.prototype.addMembership = function(multicastAddress, + interfaceAddress) { + healthCheck(this); + + if (!multicastAddress) { + throw new ERR_MISSING_ARGS('multicastAddress'); + } + + const { handle } = this[kStateSymbol]; + const err = handle.addMembership(multicastAddress, interfaceAddress); + if (err) { + throw new ErrnoException(err, 'addMembership'); + } +}; + + +Socket.prototype.dropMembership = function(multicastAddress, + interfaceAddress) { + healthCheck(this); + + if (!multicastAddress) { + throw new ERR_MISSING_ARGS('multicastAddress'); + } + + const { handle } = this[kStateSymbol]; + const err = handle.dropMembership(multicastAddress, interfaceAddress); + if (err) { + throw new ErrnoException(err, 'dropMembership'); + } +}; + +Socket.prototype.addSourceSpecificMembership = function(sourceAddress, + groupAddress, + interfaceAddress) { + healthCheck(this); + + 
validateString(sourceAddress, 'sourceAddress'); + validateString(groupAddress, 'groupAddress'); + + const err = + this[kStateSymbol].handle.addSourceSpecificMembership(sourceAddress, + groupAddress, + interfaceAddress); + if (err) { + throw new ErrnoException(err, 'addSourceSpecificMembership'); + } +}; + + +Socket.prototype.dropSourceSpecificMembership = function(sourceAddress, + groupAddress, + interfaceAddress) { + healthCheck(this); + + validateString(sourceAddress, 'sourceAddress'); + validateString(groupAddress, 'groupAddress'); + + const err = + this[kStateSymbol].handle.dropSourceSpecificMembership(sourceAddress, + groupAddress, + interfaceAddress); + if (err) { + throw new ErrnoException(err, 'dropSourceSpecificMembership'); + } +}; + + +function healthCheck(socket) { + if (!socket[kStateSymbol].handle) { + // Error message from dgram_legacy.js. + throw new ERR_SOCKET_DGRAM_NOT_RUNNING(); + } +} + + +function stopReceiving(socket) { + const state = socket[kStateSymbol]; + + if (!state.receiving) + return; + + state.handle.recvStop(); + state.receiving = false; +} + + +function onMessage(nread, handle, buf, rinfo) { + const self = handle[owner_symbol]; + if (nread < 0) { + return self.emit('error', new ErrnoException(nread, 'recvmsg')); + } + if (self[kStateSymbol]?.receiveBlockList?.check(rinfo.address, + rinfo.family?.toLocaleLowerCase())) { + return; + } + rinfo.size = buf.length; // compatibility + self.emit('message', buf, rinfo); +} + + +function onError(nread, handle, error) { + const self = handle[owner_symbol]; + return self.emit('error', error); +} + + +Socket.prototype.ref = function() { + const handle = this[kStateSymbol].handle; + + if (handle) + handle.ref(); + + return this; +}; + + +Socket.prototype.unref = function() { + const handle = this[kStateSymbol].handle; + + if (handle) + handle.unref(); + + return this; +}; + + +Socket.prototype.setRecvBufferSize = function(size) { + bufferSize(this, size, RECV_BUFFER); +}; + + 
+Socket.prototype.setSendBufferSize = function(size) { + bufferSize(this, size, SEND_BUFFER); +}; + + +Socket.prototype.getRecvBufferSize = function() { + return bufferSize(this, 0, RECV_BUFFER); +}; + + +Socket.prototype.getSendBufferSize = function() { + return bufferSize(this, 0, SEND_BUFFER); +}; + +Socket.prototype.getSendQueueSize = function() { + return this[kStateSymbol].handle.getSendQueueSize(); +}; + +Socket.prototype.getSendQueueCount = function() { + return this[kStateSymbol].handle.getSendQueueCount(); +}; + +// Deprecated private APIs. +ObjectDefineProperty(Socket.prototype, '_handle', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].handle; + }, 'Socket.prototype._handle is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].handle = val; + }, 'Socket.prototype._handle is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_receiving', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].receiving; + }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].receiving = val; + }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_bindState', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].bindState; + }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].bindState = val; + }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_queue', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].queue; + }, 'Socket.prototype._queue is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].queue = val; + }, 'Socket.prototype._queue is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_reuseAddr', { + __proto__: null, + get: 
deprecate(function() { + return this[kStateSymbol].reuseAddr; + }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].reuseAddr = val; + }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), +}); + + +Socket.prototype._healthCheck = deprecate(function() { + healthCheck(this); +}, 'Socket.prototype._healthCheck() is deprecated', 'DEP0112'); + + +Socket.prototype._stopReceiving = deprecate(function() { + stopReceiving(this); +}, 'Socket.prototype._stopReceiving() is deprecated', 'DEP0112'); + + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(UDP.prototype, 'owner', { + __proto__: null, + get() { return this[owner_symbol]; }, + set(v) { return this[owner_symbol] = v; }, +}); + + +module.exports = { + _createSocketHandle: deprecate( + _createSocketHandle, + 'dgram._createSocketHandle() is deprecated', + 'DEP0112', + ), + createSocket, + Socket, +}; \ No newline at end of file diff --git a/node/diagnostics_channel.js b/node/diagnostics_channel.js new file mode 100644 index 00000000..1422b0db --- /dev/null +++ b/node/diagnostics_channel.js @@ -0,0 +1,439 @@ +"use strict"; + +const { + ArrayPrototypeAt, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ObjectDefineProperty, + ObjectGetPrototypeOf, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeThen, + PromiseReject, + PromiseResolve, + ReflectApply, + SafeFinalizationRegistry, + SafeMap, + SymbolHasInstance, +} = primordials; + +const { + codes: { ERR_INVALID_ARG_TYPE }, +} = require("internal/errors"); +const { validateFunction } = require("internal/validators"); + +const { triggerUncaughtException } = internalBinding("errors"); + +const { WeakReference } = require("internal/util"); + +// Can't delete when weakref count reaches 0 as it could increment 
again. +// Only GC can be used as a valid time to clean up the channels map. +class WeakRefMap extends SafeMap { + #finalizers = new SafeFinalizationRegistry((key) => { + // Check that the key doesn't have any value before deleting, as the WeakRef for the key + // may have been replaced since finalization callbacks aren't synchronous with GC. + if (!this.has(key)) this.delete(key); + }); + + set(key, value) { + this.#finalizers.register(value, key); + return super.set(key, new WeakReference(value)); + } + + get(key) { + return super.get(key)?.get(); + } + + has(key) { + return !!this.get(key); + } + + incRef(key) { + return super.get(key)?.incRef(); + } + + decRef(key) { + return super.get(key)?.decRef(); + } +} + +function markActive(channel) { + // eslint-disable-next-line no-use-before-define + ObjectSetPrototypeOf(channel, ActiveChannel.prototype); + channel._subscribers = []; + channel._stores = new SafeMap(); +} + +function maybeMarkInactive(channel) { + // When there are no more active subscribers or bound, restore to fast prototype. + if (!channel._subscribers.length && !channel._stores.size) { + // eslint-disable-next-line no-use-before-define + ObjectSetPrototypeOf(channel, Channel.prototype); + channel._subscribers = undefined; + channel._stores = undefined; + } +} + +function defaultTransform(data) { + return data; +} + +function wrapStoreRun(store, data, next, transform = defaultTransform) { + return () => { + let context; + try { + context = transform(data); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + return next(); + } + + return store.run(context, next); + }; +} + +// TODO(qard): should there be a C++ channel interface? 
+class ActiveChannel { + subscribe(subscription) { + validateFunction(subscription, "subscription"); + this._subscribers = ArrayPrototypeSlice(this._subscribers); + ArrayPrototypePush(this._subscribers, subscription); + channels.incRef(this.name); + } + + unsubscribe(subscription) { + const index = ArrayPrototypeIndexOf(this._subscribers, subscription); + if (index === -1) return false; + + const before = ArrayPrototypeSlice(this._subscribers, 0, index); + const after = ArrayPrototypeSlice(this._subscribers, index + 1); + this._subscribers = before; + ArrayPrototypePushApply(this._subscribers, after); + + channels.decRef(this.name); + maybeMarkInactive(this); + + return true; + } + + bindStore(store, transform) { + const replacing = this._stores.has(store); + if (!replacing) channels.incRef(this.name); + this._stores.set(store, transform); + } + + unbindStore(store) { + if (!this._stores.has(store)) { + return false; + } + + this._stores.delete(store); + + channels.decRef(this.name); + maybeMarkInactive(this); + + return true; + } + + get hasSubscribers() { + return true; + } + + publish(data) { + const subscribers = this._subscribers; + for (let i = 0; i < (subscribers?.length || 0); i++) { + try { + const onMessage = subscribers[i]; + onMessage(data, this.name); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + } + } + } + + runStores(data, fn, thisArg, ...args) { + let run = () => { + this.publish(data); + return ReflectApply(fn, thisArg, args); + }; + + for (const entry of this._stores.entries()) { + const store = entry[0]; + const transform = entry[1]; + run = wrapStoreRun(store, data, run, transform); + } + + return run(); + } +} + +class Channel { + constructor(name) { + this._subscribers = undefined; + this._stores = undefined; + this.name = name; + + channels.set(name, this); + } + + static [SymbolHasInstance](instance) { + const prototype = ObjectGetPrototypeOf(instance); + return ( + prototype === 
Channel.prototype || prototype === ActiveChannel.prototype + ); + } + + subscribe(subscription) { + markActive(this); + this.subscribe(subscription); + } + + unsubscribe() { + return false; + } + + bindStore(store, transform) { + markActive(this); + this.bindStore(store, transform); + } + + unbindStore() { + return false; + } + + get hasSubscribers() { + return false; + } + + publish() {} + + runStores(data, fn, thisArg, ...args) { + return ReflectApply(fn, thisArg, args); + } +} + +const channels = new WeakRefMap(); + +function channel(name) { + const channel = channels.get(name); + if (channel) return channel; + + if (typeof name !== "string" && typeof name !== "symbol") { + throw new ERR_INVALID_ARG_TYPE("channel", ["string", "symbol"], name); + } + + return new Channel(name); +} + +function subscribe(name, subscription) { + return channel(name).subscribe(subscription); +} + +function unsubscribe(name, subscription) { + return channel(name).unsubscribe(subscription); +} + +function hasSubscribers(name) { + const channel = channels.get(name); + if (!channel) return false; + + return channel.hasSubscribers; +} + +const traceEvents = ["start", "end", "asyncStart", "asyncEnd", "error"]; + +function assertChannel(value, name) { + if (!(value instanceof Channel)) { + throw new ERR_INVALID_ARG_TYPE(name, ["Channel"], value); + } +} + +function tracingChannelFrom(nameOrChannels, name) { + if (typeof nameOrChannels === "string") { + return channel(`tracing:${nameOrChannels}:${name}`); + } + + if (typeof nameOrChannels === "object" && nameOrChannels !== null) { + const channel = nameOrChannels[name]; + assertChannel(channel, `nameOrChannels.${name}`); + return channel; + } + + throw new ERR_INVALID_ARG_TYPE( + "nameOrChannels", + ["string", "object", "TracingChannel"], + nameOrChannels + ); +} + +class TracingChannel { + constructor(nameOrChannels) { + for (let i = 0; i < traceEvents.length; ++i) { + const eventName = traceEvents[i]; + ObjectDefineProperty(this, 
eventName, { + __proto__: null, + value: tracingChannelFrom(nameOrChannels, eventName), + }); + } + } + + get hasSubscribers() { + return ( + this.start?.hasSubscribers || + this.end?.hasSubscribers || + this.asyncStart?.hasSubscribers || + this.asyncEnd?.hasSubscribers || + this.error?.hasSubscribers + ); + } + + subscribe(handlers) { + for (let i = 0; i < traceEvents.length; ++i) { + const name = traceEvents[i]; + if (!handlers[name]) continue; + + this[name]?.subscribe(handlers[name]); + } + } + + unsubscribe(handlers) { + let done = true; + + for (let i = 0; i < traceEvents.length; ++i) { + const name = traceEvents[i]; + if (!handlers[name]) continue; + + if (!this[name]?.unsubscribe(handlers[name])) { + done = false; + } + } + + return done; + } + + traceSync(fn, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, error } = this; + + return start.runStores(context, () => { + try { + const result = ReflectApply(fn, thisArg, args); + context.result = result; + return result; + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } + + tracePromise(fn, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, asyncStart, asyncEnd, error } = this; + + function reject(err) { + context.error = err; + error.publish(context); + asyncStart.publish(context); + // TODO: Is there a way to have asyncEnd _after_ the continuation? + asyncEnd.publish(context); + return PromiseReject(err); + } + + function resolve(result) { + context.result = result; + asyncStart.publish(context); + // TODO: Is there a way to have asyncEnd _after_ the continuation? 
+ asyncEnd.publish(context); + return result; + } + + return start.runStores(context, () => { + try { + let promise = ReflectApply(fn, thisArg, args); + // Convert thenables to native promises + if (!(promise instanceof Promise)) { + promise = PromiseResolve(promise); + } + return PromisePrototypeThen(promise, resolve, reject); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } + + traceCallback(fn, position = -1, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, asyncStart, asyncEnd, error } = this; + + function wrappedCallback(err, res) { + if (err) { + context.error = err; + error.publish(context); + } else { + context.result = res; + } + + // Using runStores here enables manual context failure recovery + asyncStart.runStores(context, () => { + try { + return ReflectApply(callback, this, arguments); + } finally { + asyncEnd.publish(context); + } + }); + } + + const callback = ArrayPrototypeAt(args, position); + validateFunction(callback, "callback"); + ArrayPrototypeSplice(args, position, 1, wrappedCallback); + + return start.runStores(context, () => { + try { + return ReflectApply(fn, thisArg, args); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } +} + +function tracingChannel(nameOrChannels) { + return new TracingChannel(nameOrChannels); +} + +module.exports = { + channel, + hasSubscribers, + subscribe, + tracingChannel, + unsubscribe, + Channel, +}; diff --git a/node/dns.js b/node/dns.js new file mode 100644 index 00000000..22d23dea --- /dev/null +++ b/node/dns.js @@ -0,0 +1,345 @@ +"use strict"; + +const { ObjectDefineProperties, ObjectDefineProperty, Symbol } = primordials; + +const cares = internalBinding("cares_wrap"); +const { isIP } = require("internal/net"); +const { customPromisifyArgs } = 
require("internal/util"); +const { + DNSException, + codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }, +} = require("internal/errors"); +const { + bindDefaultResolver, + setDefaultResolver, + validateHints, + getDefaultResultOrder, + setDefaultResultOrder, + errorCodes: dnsErrorCodes, + validDnsOrders, + validFamilies, +} = require("internal/dns/utils"); +const { Resolver } = require("internal/dns/callback_resolver"); +const { + NODATA, + FORMERR, + SERVFAIL, + NOTFOUND, + NOTIMP, + REFUSED, + BADQUERY, + BADNAME, + BADFAMILY, + BADRESP, + CONNREFUSED, + TIMEOUT, + EOF, + FILE, + NOMEM, + DESTRUCTION, + BADSTR, + BADFLAGS, + NONAME, + BADHINTS, + NOTINITIALIZED, + LOADIPHLPAPI, + ADDRGETNETWORKPARAMS, + CANCELLED, +} = dnsErrorCodes; +const { + validateBoolean, + validateFunction, + validateNumber, + validateOneOf, + validatePort, + validateString, +} = require("internal/validators"); + +const { + GetAddrInfoReqWrap, + GetNameInfoReqWrap, + DNS_ORDER_VERBATIM, + DNS_ORDER_IPV4_FIRST, + DNS_ORDER_IPV6_FIRST, +} = cares; + +const kPerfHooksDnsLookupContext = Symbol("kPerfHooksDnsLookupContext"); +const kPerfHooksDnsLookupServiceContext = Symbol( + "kPerfHooksDnsLookupServiceContext" +); + +const { hasObserver, startPerf, stopPerf } = require("internal/perf/observe"); + +let promises = null; // Lazy loaded + +function onlookup(err, addresses) { + if (err) { + return this.callback(new DNSException(err, "getaddrinfo", this.hostname)); + } + this.callback(null, addresses[0], this.family || isIP(addresses[0])); + if (this[kPerfHooksDnsLookupContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupContext, { detail: { addresses } }); + } +} + +function onlookupall(err, addresses) { + if (err) { + return this.callback(new DNSException(err, "getaddrinfo", this.hostname)); + } + + const family = this.family; + for (let i = 0; i < addresses.length; i++) { + const addr = addresses[i]; + addresses[i] = { + address: addr, + family: family || 
isIP(addr), + }; + } + + this.callback(null, addresses); + if (this[kPerfHooksDnsLookupContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupContext, { detail: { addresses } }); + } +} + +// Easy DNS A/AAAA look up +// lookup(hostname, [options,] callback) +function lookup(hostname, options, callback) { + let hints = 0; + let family = 0; + let all = false; + let dnsOrder = getDefaultResultOrder(); + + // Parse arguments + if (hostname) { + validateString(hostname, "hostname"); + } + + if (typeof options === "function") { + callback = options; + family = 0; + } else if (typeof options === "number") { + validateFunction(callback, "callback"); + + validateOneOf(options, "family", validFamilies); + family = options; + } else if (options !== undefined && typeof options !== "object") { + validateFunction(arguments.length === 2 ? options : callback, "callback"); + throw new ERR_INVALID_ARG_TYPE("options", ["integer", "object"], options); + } else { + validateFunction(callback, "callback"); + + if (options?.hints != null) { + validateNumber(options.hints, "options.hints"); + hints = options.hints >>> 0; + validateHints(hints); + } + if (options?.family != null) { + switch (options.family) { + case "IPv4": + family = 4; + break; + case "IPv6": + family = 6; + break; + default: + validateOneOf(options.family, "options.family", validFamilies); + family = options.family; + break; + } + } + if (options?.all != null) { + validateBoolean(options.all, "options.all"); + all = options.all; + } + if (options?.verbatim != null) { + validateBoolean(options.verbatim, "options.verbatim"); + dnsOrder = options.verbatim ? 
"verbatim" : "ipv4first"; + } + if (options?.order != null) { + validateOneOf(options.order, "options.order", validDnsOrders); + dnsOrder = options.order; + } + } + + if (!hostname) { + throw new ERR_INVALID_ARG_VALUE( + "hostname", + hostname, + "must be a non-empty string" + ); + } + + const matchedFamily = isIP(hostname); + if (matchedFamily) { + if (all) { + process.nextTick(callback, null, [ + { address: hostname, family: matchedFamily }, + ]); + } else { + process.nextTick(callback, null, hostname, matchedFamily); + } + return {}; + } + + const req = new GetAddrInfoReqWrap(); + req.callback = callback; + req.family = family; + req.hostname = hostname; + req.oncomplete = all ? onlookupall : onlookup; + + let order = DNS_ORDER_VERBATIM; + + if (dnsOrder === "ipv4first") { + order = DNS_ORDER_IPV4_FIRST; + } else if (dnsOrder === "ipv6first") { + order = DNS_ORDER_IPV6_FIRST; + } + + const err = cares.getaddrinfo(req, hostname, family, hints, order); + if (err) { + process.nextTick(callback, new DNSException(err, "getaddrinfo", hostname)); + return {}; + } + if (hasObserver("dns")) { + const detail = { + hostname, + family, + hints, + verbatim: order === DNS_ORDER_VERBATIM, + order: dnsOrder, + }; + + startPerf(req, kPerfHooksDnsLookupContext, { + type: "dns", + name: "lookup", + detail, + }); + } + return req; +} + +ObjectDefineProperty(lookup, customPromisifyArgs, { + __proto__: null, + value: ["address", "family"], + enumerable: false, +}); + +function onlookupservice(err, hostname, service) { + if (err) + return this.callback(new DNSException(err, "getnameinfo", this.hostname)); + + this.callback(null, hostname, service); + if (this[kPerfHooksDnsLookupServiceContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupServiceContext, { + detail: { hostname, service }, + }); + } +} + +function lookupService(address, port, callback) { + if (arguments.length !== 3) + throw new ERR_MISSING_ARGS("address", "port", "callback"); + + if (isIP(address) === 
0) throw new ERR_INVALID_ARG_VALUE("address", address); + + validatePort(port); + + validateFunction(callback, "callback"); + + port = +port; + + const req = new GetNameInfoReqWrap(); + req.callback = callback; + req.hostname = address; + req.port = port; + req.oncomplete = onlookupservice; + + const err = cares.getnameinfo(req, address, port); + if (err) throw new DNSException(err, "getnameinfo", address); + if (hasObserver("dns")) { + startPerf(req, kPerfHooksDnsLookupServiceContext, { + type: "dns", + name: "lookupService", + detail: { + host: address, + port, + }, + }); + } + return req; +} + +ObjectDefineProperty(lookupService, customPromisifyArgs, { + __proto__: null, + value: ["hostname", "service"], + enumerable: false, +}); + +function defaultResolverSetServers(servers) { + const resolver = new Resolver(); + + resolver.setServers(servers); + setDefaultResolver(resolver); + bindDefaultResolver(module.exports, Resolver.prototype); + + if (promises !== null) + bindDefaultResolver(promises, promises.Resolver.prototype); +} + +module.exports = { + lookup, + lookupService, + + Resolver, + getDefaultResultOrder, + setDefaultResultOrder, + setServers: defaultResolverSetServers, + + // uv_getaddrinfo flags + ADDRCONFIG: cares.AI_ADDRCONFIG, + ALL: cares.AI_ALL, + V4MAPPED: cares.AI_V4MAPPED, + + // ERROR CODES + NODATA, + FORMERR, + SERVFAIL, + NOTFOUND, + NOTIMP, + REFUSED, + BADQUERY, + BADNAME, + BADFAMILY, + BADRESP, + CONNREFUSED, + TIMEOUT, + EOF, + FILE, + NOMEM, + DESTRUCTION, + BADSTR, + BADFLAGS, + NONAME, + BADHINTS, + NOTINITIALIZED, + LOADIPHLPAPI, + ADDRGETNETWORKPARAMS, + CANCELLED, +}; + +bindDefaultResolver(module.exports, Resolver.prototype); + +ObjectDefineProperties(module.exports, { + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (promises === null) { + promises = require("internal/dns/promises"); + } + return promises; + }, + }, +}); diff --git a/node/domain.js b/node/domain.js new file mode 100644 
index 00000000..29aefdb9 --- /dev/null +++ b/node/domain.js @@ -0,0 +1,529 @@ +"use strict"; + +// WARNING: THIS MODULE IS PENDING DEPRECATION. +// +// No new pull requests targeting this module will be accepted +// unless they address existing, critical bugs. + +const { + ArrayPrototypeEvery, + ArrayPrototypeIndexOf, + ArrayPrototypeLastIndexOf, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + Error, + FunctionPrototypeCall, + ObjectDefineProperty, + Promise, + ReflectApply, + SafeMap, + SafeWeakMap, + StringPrototypeRepeat, + Symbol, +} = primordials; + +const EventEmitter = require("events"); +const { + ERR_DOMAIN_CALLBACK_NOT_AVAILABLE, + ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE, + ERR_UNHANDLED_ERROR, +} = require("internal/errors").codes; +const { createHook } = require("async_hooks"); +const { useDomainTrampoline } = require("internal/async_hooks"); + +const kWeak = Symbol("kWeak"); +const { WeakReference } = require("internal/util"); + +// Overwrite process.domain with a getter/setter that will allow for more +// effective optimizations +const _domain = [null]; +ObjectDefineProperty(process, "domain", { + __proto__: null, + enumerable: true, + get: function () { + return _domain[0]; + }, + set: function (arg) { + return (_domain[0] = arg); + }, +}); + +const vmPromises = new SafeWeakMap(); +const pairing = new SafeMap(); +const asyncHook = createHook({ + init(asyncId, type, triggerAsyncId, resource) { + if (process.domain !== null && process.domain !== undefined) { + // If this operation is created while in a domain, let's mark it + pairing.set(asyncId, process.domain[kWeak]); + // Promises from other contexts, such as with the VM module, should not + // have a domain property as it can be used to escape the sandbox. 
+ if (type !== "PROMISE" || resource instanceof Promise) { + ObjectDefineProperty(resource, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: process.domain, + writable: true, + }); + // Because promises from other contexts don't get a domain field, + // the domain needs to be held alive another way. Stuffing it in a + // weakmap connected to the promise lifetime can fix that. + } else { + vmPromises.set(resource, process.domain); + } + } + }, + before(asyncId) { + const current = pairing.get(asyncId); + if (current !== undefined) { + // Enter domain for this cb + // We will get the domain through current.get(), because the resource + // object's .domain property makes sure it is not garbage collected. + // However, we do need to make the reference to the domain non-weak, + // so that it cannot be garbage collected before the after() hook. + current.incRef(); + current.get().enter(); + } + }, + after(asyncId) { + const current = pairing.get(asyncId); + if (current !== undefined) { + // Exit domain for this cb + const domain = current.get(); + current.decRef(); + domain.exit(); + } + }, + destroy(asyncId) { + pairing.delete(asyncId); // cleaning up + }, +}); + +// When domains are in use, they claim full ownership of the +// uncaught exception capture callback. +if (process.hasUncaughtExceptionCaptureCallback()) { + throw new ERR_DOMAIN_CALLBACK_NOT_AVAILABLE(); +} + +// Get the stack trace at the point where `domain` was required. 
// eslint-disable-next-line no-restricted-syntax
// Captured at module load: records WHERE `domain` was first required, so the
// error thrown below can point users at the offending require site.
const domainRequireStack = new Error("require(`domain`) at this point").stack;

// `domain` takes over the uncaught-exception capture callback for itself, so
// user code may no longer install one once this module is loaded.
const { setUncaughtExceptionCaptureCallback } = process;
process.setUncaughtExceptionCaptureCallback = function (fn) {
  // `fn` is deliberately ignored: this override always throws.
  const err = new ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE();
  err.stack += `\n${StringPrototypeRepeat("-", 40)}\n${domainRequireStack}`;
  throw err;
};

// One-shot guard so DEP0097 is warned at most once per process.
let sendMakeCallbackDeprecation = false;
function emitMakeCallbackDeprecation({ target, method }) {
  if (!sendMakeCallbackDeprecation) {
    process.emitWarning(
      "Using a domain property in MakeCallback is deprecated. Use the " +
        "async_context variant of MakeCallback or the AsyncResource class " +
        "instead. " +
        `(Triggered by calling ${method?.name || ""} ` +
        `on ${target?.constructor?.name}.)`,
      "DeprecationWarning",
      "DEP0097"
    );
    sendMakeCallbackDeprecation = true;
  }
}

// Trampoline installed below via useDomainTrampoline(): wraps every
// MakeCallback invocation so the receiver's domain is entered/exited around
// the user callback.
function topLevelDomainCallback(cb, ...args) {
  const domain = this.domain;
  if (exports.active && domain)
    emitMakeCallbackDeprecation({ target: this, method: cb });

  if (domain) domain.enter();
  const ret = ReflectApply(cb, this, args);
  if (domain) domain.exit();

  return ret;
}

// It's possible to enter one domain while already inside
// another one. The stack is each entered domain.
let stack = [];
exports._stack = stack;
useDomainTrampoline(topLevelDomainCallback);

// Keep process-level uncaught-exception capture in sync with the domain
// stack: only install a capture callback while at least one stacked domain
// has an 'error' listener.
function updateExceptionCapture() {
  if (
    ArrayPrototypeEvery(stack, (domain) => domain.listenerCount("error") === 0)
  ) {
    setUncaughtExceptionCaptureCallback(null);
  } else {
    // Clear first: installing a capture callback while one is already set
    // would throw (see process.setUncaughtExceptionCaptureCallback docs).
    setUncaughtExceptionCaptureCallback(null);
    setUncaughtExceptionCaptureCallback((er) => {
      return process.domain._errorHandler(er);
    });
  }
}

process.on("newListener", (name, listener) => {
  if (
    name === "uncaughtException" &&
    listener !== domainUncaughtExceptionClear
  ) {
    // Make sure the first listener for `uncaughtException` always clears
    // the domain stack.
    process.removeListener(name, domainUncaughtExceptionClear);
    process.prependListener(name, domainUncaughtExceptionClear);
  }
});

process.on("removeListener", (name, listener) => {
  if (
    name === "uncaughtException" &&
    listener !== domainUncaughtExceptionClear
  ) {
    // If the domain listener would be the only remaining one, remove it.
    const listeners = process.listeners("uncaughtException");
    if (listeners.length === 1 && listeners[0] === domainUncaughtExceptionClear)
      process.removeListener(name, domainUncaughtExceptionClear);
  }
});

// Reset all domain state; run before user 'uncaughtException' handlers so a
// crashed tick never leaks an active domain into the next one.
function domainUncaughtExceptionClear() {
  stack.length = 0;
  exports.active = process.domain = null;
  updateExceptionCapture();
}

class Domain extends EventEmitter {
  constructor() {
    super();

    // `members` holds emitters/timers explicitly bound via add().
    this.members = [];
    // Weak self-reference consumed by the async hook machinery; avoids the
    // hook keeping the domain alive.
    this[kWeak] = new WeakReference(this);
    asyncHook.enable();

    // Adding/removing 'error' listeners changes whether the process-level
    // capture callback must be installed.
    this.on("removeListener", updateExceptionCapture);
    this.on("newListener", updateExceptionCapture);
  }
}

exports.Domain = Domain;

exports.create = exports.createDomain = function createDomain() {
  return new Domain();
};

// The active domain is always the one that we're currently in.
exports.active = null;
Domain.prototype.members = undefined;

// Called by process._fatalException in case an error was thrown.
// Returns true when some domain's 'error' handler consumed the error.
Domain.prototype._errorHandler = function (er) {
  let caught = false;

  if ((typeof er === "object" && er !== null) || typeof er === "function") {
    ObjectDefineProperty(er, "domain", {
      __proto__: null,
      configurable: true,
      enumerable: false,
      value: this,
      writable: true,
    });
    er.domainThrown = true;
  }
  // Pop all adjacent duplicates of the currently active domain from the stack.
  // This is done to prevent a domain's error handler to run within the context
  // of itself, and re-entering itself recursively handler as a result of an
  // exception thrown in its context.
  while (exports.active === this) {
    this.exit();
  }

  // The top-level domain-handler is handled separately.
  //
  // The reason is that if V8 was passed a command line option
  // asking it to abort on an uncaught exception (currently
  // "--abort-on-uncaught-exception"), we want an uncaught exception
  // in the top-level domain error handler to make the
  // process abort. Using try/catch here would always make V8 think
  // that these exceptions are caught, and thus would prevent it from
  // aborting in these cases.
  if (stack.length === 0) {
    // If there's no error handler, do not emit an 'error' event
    // as this would throw an error, make the process exit, and thus
    // prevent the process 'uncaughtException' event from being emitted
    // if a listener is set.
    if (this.listenerCount("error") > 0) {
      // Clear the uncaughtExceptionCaptureCallback so that we know that, since
      // the top-level domain is not active anymore, it would be ok to abort on
      // an uncaught exception at this point
      setUncaughtExceptionCaptureCallback(null);
      try {
        caught = this.emit("error", er);
      } finally {
        updateExceptionCapture();
      }
    }
  } else {
    // Wrap this in a try/catch so we don't get infinite throwing
    try {
      // One of three things will happen here.
      //
      // 1. There is a handler, caught = true
      // 2. There is no handler, caught = false
      // 3. It throws, caught = false
      //
      // If caught is false after this, then there's no need to exit()
      // the domain, because we're going to crash the process anyway.
      caught = this.emit("error", er);
    } catch (er2) {
      // The domain error handler threw! oh no!
      // See if another domain can catch THIS error,
      // or else crash on the original one.
      updateExceptionCapture();
      if (stack.length) {
        exports.active = process.domain = stack[stack.length - 1];
        caught = process.domain._errorHandler(er2);
      } else {
        // Pass on to the next exception handler.
        throw er2;
      }
    }
  }

  // Exit all domains on the stack. Uncaught exceptions end the
  // current tick and no domains should be left on the stack
  // between ticks.
  domainUncaughtExceptionClear();

  return caught;
};

Domain.prototype.enter = function () {
  // Note that this might be a no-op, but we still need
  // to push it onto the stack so that we can pop it later.
  exports.active = process.domain = this;
  ArrayPrototypePush(stack, this);
  updateExceptionCapture();
};

Domain.prototype.exit = function () {
  // Don't do anything if this domain is not on the stack.
  const index = ArrayPrototypeLastIndexOf(stack, this);
  if (index === -1) return;

  // Exit all domains until this one.
  ArrayPrototypeSplice(stack, index);

  exports.active = stack.length === 0 ? undefined : stack[stack.length - 1];
  process.domain = exports.active;
  updateExceptionCapture();
};

// note: this works for timers as well.
Domain.prototype.add = function (ee) {
  // If the domain is already added, then nothing left to do.
  if (ee.domain === this) return;

  // Has a domain already - remove it first.
  if (ee.domain) ee.domain.remove(ee);

  // Check for circular Domain->Domain links.
  // They cause big issues.
  //
  // For example:
  // var d = domain.create();
  // var e = domain.create();
  // d.add(e);
  // e.add(d);
  // e.emit('error', er); // RangeError, stack overflow!
  if (this.domain && ee instanceof Domain) {
    for (let d = this.domain; d; d = d.domain) {
      if (ee === d) return;
    }
  }

  ObjectDefineProperty(ee, "domain", {
    __proto__: null,
    configurable: true,
    enumerable: false,
    value: this,
    writable: true,
  });
  ArrayPrototypePush(this.members, ee);
};

Domain.prototype.remove = function (ee) {
  ee.domain = null;
  const index = ArrayPrototypeIndexOf(this.members, ee);
  if (index !== -1) ArrayPrototypeSplice(this.members, index, 1);
};

Domain.prototype.run = function (fn) {
  this.enter();
  const ret = ReflectApply(fn, this, ArrayPrototypeSlice(arguments, 1));
  this.exit();

  return ret;
};

// Shared body for the functions returned by intercept(): first argument is
// treated as a Node-style error; an Error short-circuits into the domain's
// 'error' event instead of invoking the callback.
function intercepted(_this, self, cb, fnargs) {
  if (fnargs[0] && fnargs[0] instanceof Error) {
    const er = fnargs[0];
    er.domainBound = cb;
    er.domainThrown = false;
    ObjectDefineProperty(er, "domain", {
      __proto__: null,
      configurable: true,
      enumerable: false,
      value: self,
      writable: true,
    });
    self.emit("error", er);
    return;
  }

  self.enter();
  const ret = ReflectApply(cb, _this, ArrayPrototypeSlice(fnargs, 1));
  self.exit();

  return ret;
}

Domain.prototype.intercept = function (cb) {
  const self = this;

  function runIntercepted() {
    return intercepted(this, self, cb, arguments);
  }

  return runIntercepted;
};

// Shared body for the functions returned by bind(): run `cb` with the domain
// entered, forwarding all arguments unchanged.
function bound(_this, self, cb, fnargs) {
  self.enter();
  const ret = ReflectApply(cb, _this, fnargs);
  self.exit();

  return ret;
}

Domain.prototype.bind = function (cb) {
  const self = this;

  function runBound() {
    return bound(this, self, cb, arguments);
  }

  ObjectDefineProperty(runBound, "domain", {
    __proto__: null,
    configurable: true,
    enumerable: false,
    value: this,
    writable: true,
  });

  return runBound;
};

// Override EventEmitter methods to make it domain-aware.
EventEmitter.usingDomains = true;

// Patch EventEmitter.init so every emitter created while a domain is active
// is implicitly owned by that domain (Domain instances themselves excluded).
const eventInit = EventEmitter.init;
EventEmitter.init = function (opts) {
  ObjectDefineProperty(this, "domain", {
    __proto__: null,
    configurable: true,
    enumerable: false,
    value: null,
    writable: true,
  });
  if (exports.active && !(this instanceof exports.Domain)) {
    this.domain = exports.active;
  }

  return FunctionPrototypeCall(eventInit, this, opts);
};

// Patch EventEmitter.prototype.emit so 'error' events with no local handler
// are routed to the owning domain, and all other events run inside it.
const eventEmit = EventEmitter.prototype.emit;
EventEmitter.prototype.emit = function emit(...args) {
  const domain = this.domain;

  const type = args[0];
  const shouldEmitError = type === "error" && this.listenerCount(type) > 0;

  // Just call original `emit` if current EE instance has `error`
  // handler, there's no active domain or this is process
  if (
    shouldEmitError ||
    domain === null ||
    domain === undefined ||
    this === process
  ) {
    return ReflectApply(eventEmit, this, args);
  }

  if (type === "error") {
    const er = args.length > 1 && args[1] ? args[1] : new ERR_UNHANDLED_ERROR();

    if (typeof er === "object") {
      er.domainEmitter = this;
      ObjectDefineProperty(er, "domain", {
        __proto__: null,
        configurable: true,
        enumerable: false,
        value: domain,
        writable: true,
      });
      er.domainThrown = false;
    }

    // Remove the current domain (and its duplicates) from the domains stack and
    // set the active domain to its parent (if any) so that the domain's error
    // handler doesn't run in its own context. This prevents any event emitter
    // created or any exception thrown in that error handler from recursively
    // executing that error handler.
    const origDomainsStack = ArrayPrototypeSlice(stack);
    const origActiveDomain = process.domain;

    // Travel the domains stack from top to bottom to find the first domain
    // instance that is not a duplicate of the current active domain.
    let idx = stack.length - 1;
    while (idx > -1 && process.domain === stack[idx]) {
      --idx;
    }

    // Change the stack to not contain the current active domain, and only the
    // domains above it on the stack.
    if (idx < 0) {
      stack.length = 0;
    } else {
      ArrayPrototypeSplice(stack, idx + 1);
    }

    // Change the current active domain
    if (stack.length > 0) {
      exports.active = process.domain = stack[stack.length - 1];
    } else {
      exports.active = process.domain = null;
    }

    updateExceptionCapture();

    domain.emit("error", er);

    // Now that the domain's error handler has completed, restore the domains
    // stack and the active domain to their original values.
    exports._stack = stack = origDomainsStack;
    exports.active = process.domain = origActiveDomain;
    updateExceptionCapture();

    // Emitting to the domain counts as "not handled locally".
    return false;
  }

  domain.enter();
  const ret = ReflectApply(eventEmit, this, args);
  domain.exit();

  return ret;
};
diff --git a/node/events.js b/node/events.js
new file mode 100644
index 00000000..fdb1605e
--- /dev/null
+++ b/node/events.js
@@ -0,0 +1,1244 @@
"use strict";

const {
  ArrayPrototypeJoin,
  ArrayPrototypePop,
  ArrayPrototypePush,
  ArrayPrototypeSlice,
  ArrayPrototypeSplice,
  ArrayPrototypeUnshift,
  AsyncIteratorPrototype,
  Boolean,
  Error,
  ErrorCaptureStackTrace,
  FunctionPrototypeBind,
  NumberMAX_SAFE_INTEGER,
  ObjectDefineProperties,
  ObjectDefineProperty,
  ObjectGetPrototypeOf,
  ObjectSetPrototypeOf,
  Promise,
  PromiseReject,
  PromiseResolve,
  ReflectApply,
  ReflectOwnKeys,
  String,
  StringPrototypeSplit,
  Symbol,
  SymbolAsyncIterator,
  SymbolDispose,
  SymbolFor,
} = primordials;
const kRejection = SymbolFor("nodejs.rejection");

const { kEmptyObject, spliceOne } = require("internal/util");

const { inspect, identicalSequenceRange } = require("internal/util/inspect");

// Lazily-loaded dependencies (resolved on first use further down the file).
let FixedQueue;
let kFirstEventParam;
let kResistStopPropagation;

const {
  AbortError,
  codes: { ERR_INVALID_ARG_TYPE, ERR_UNHANDLED_ERROR },
  genericNodeError,
  kEnhanceStackBeforeInspector,
} = require("internal/errors");

const {
  validateInteger,
  validateAbortSignal,
  validateBoolean,
  validateFunction,
  validateNumber,
  validateObject,
  validateString,
} = require("internal/validators");
const { addAbortListener } = require("internal/events/abort_listener");

const kCapture = Symbol("kCapture");
const kErrorMonitor = Symbol("events.errorMonitor");
const kShapeMode = Symbol("shapeMode");
const kMaxEventTargetListeners = Symbol("events.maxEventTargetListeners");
const kMaxEventTargetListenersWarned = Symbol(
  "events.maxEventTargetListenersWarned"
);
const kWatermarkData = SymbolFor("nodejs.watermarkData");

let EventEmitterAsyncResource;
// The EventEmitterAsyncResource has to be initialized lazily because event.js
// is loaded so early in the bootstrap process, before async_hooks is available.
//
// This implementation was adapted straight from addaleax's
// eventemitter-asyncresource MIT-licensed userland module.
// https://github.com/addaleax/eventemitter-asyncresource
function lazyEventEmitterAsyncResource() {
  if (EventEmitterAsyncResource === undefined) {
    const { AsyncResource } = require("async_hooks");

    // Async resource that keeps a strong reference back to its emitter so
    // the emitter stays alive for the resource's lifetime.
    class EventEmitterReferencingAsyncResource extends AsyncResource {
      #eventEmitter;

      /**
       * @param {EventEmitter} ee
       * @param {string} [type]
       * @param {{
       *   triggerAsyncId?: number,
       *   requireManualDestroy?: boolean,
       * }} [options]
       */
      constructor(ee, type, options) {
        super(type, options);
        this.#eventEmitter = ee;
      }

      /**
       * @type {EventEmitter}
       */
      get eventEmitter() {
        return this.#eventEmitter;
      }
    }

    EventEmitterAsyncResource = class EventEmitterAsyncResource extends (
      EventEmitter
    ) {
      #asyncResource;

      /**
       * @param {{
       *   name?: string,
       *   triggerAsyncId?: number,
       *   requireManualDestroy?: boolean,
       * }} [options]
       */
      constructor(options = undefined) {
        let name;
        if (typeof options === "string") {
          // Shorthand: new EventEmitterAsyncResource('name')
          name = options;
          options = undefined;
        } else {
          // Direct construction requires an explicit name; subclasses may
          // fall back to their constructor name.
          if (new.target === EventEmitterAsyncResource) {
            validateString(options?.name, "options.name");
          }
          name = options?.name || new.target.name;
        }
        super(options);

        this.#asyncResource = new EventEmitterReferencingAsyncResource(
          this,
          name,
          options
        );
      }

      /**
       * Emits within the async scope of the wrapped resource.
       * @param {symbol|string} event
       * @param {any[]} args
       * @returns {boolean}
       */
      emit(event, ...args) {
        const asyncResource = this.#asyncResource;
        ArrayPrototypeUnshift(args, super.emit, this, event);
        return ReflectApply(asyncResource.runInAsyncScope, asyncResource, args);
      }

      /**
       * @returns {void}
       */
      emitDestroy() {
        this.#asyncResource.emitDestroy();
      }

      /**
       * @type {number}
       */
      get asyncId() {
        return this.#asyncResource.asyncId();
      }

      /**
       * @type {number}
       */
      get triggerAsyncId() {
        return this.#asyncResource.triggerAsyncId();
      }

      /**
       * @type {EventEmitterReferencingAsyncResource}
       */
      get asyncResource() {
        return this.#asyncResource;
      }
    };
  }
  return EventEmitterAsyncResource;
}

/**
 * Creates a new `EventEmitter` instance.
 * @param {{ captureRejections?: boolean; }} [opts]
 * @constructs EventEmitter
 */
function EventEmitter(opts) {
  EventEmitter.init.call(this, opts);
}
module.exports = EventEmitter;
module.exports.addAbortListener = addAbortListener;
module.exports.once = once;
module.exports.on = on;
module.exports.getEventListeners = getEventListeners;
module.exports.getMaxListeners = getMaxListeners;
module.exports.listenerCount = listenerCount;
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;

EventEmitter.usingDomains = false;

EventEmitter.captureRejectionSymbol = kRejection;
// Global toggle: reads/writes the prototype's kCapture slot so it affects
// all emitters that have not set captureRejections per-instance.
ObjectDefineProperty(EventEmitter, "captureRejections", {
  __proto__: null,
  get() {
    return EventEmitter.prototype[kCapture];
  },
  set(value) {
    validateBoolean(value, "EventEmitter.captureRejections");

    EventEmitter.prototype[kCapture] = value;
  },
  enumerable: true,
});

ObjectDefineProperty(EventEmitter, "EventEmitterAsyncResource", {
  __proto__: null,
  enumerable: true,
  get: lazyEventEmitterAsyncResource,
  set: undefined,
  configurable: true,
});

EventEmitter.errorMonitor = kErrorMonitor;

// The default for captureRejections is false
ObjectDefineProperty(EventEmitter.prototype, kCapture, {
  __proto__: null,
  value: false,
  writable: true,
  enumerable: false,
});

EventEmitter.prototype._events = undefined;
EventEmitter.prototype._eventsCount = 0;
EventEmitter.prototype._maxListeners = undefined;

// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
// Process-wide default listener-leak threshold; mutable via the
// `EventEmitter.defaultMaxListeners` accessor and `setMaxListeners()` below.
let defaultMaxListeners = 10;
// Lazily-resolved brand check from internal/event_target (see setMaxListeners).
let isEventTarget;

// Validates that `listener` is callable; throws ERR_INVALID_ARG_TYPE otherwise.
function checkListener(listener) {
  validateFunction(listener, "listener");
}

ObjectDefineProperty(EventEmitter, "defaultMaxListeners", {
  __proto__: null,
  enumerable: true,
  get() {
    return defaultMaxListeners;
  },
  set(arg) {
    validateNumber(arg, "defaultMaxListeners", 0);
    defaultMaxListeners = arg;
  },
});

// Expose the EventTarget max-listener symbols as frozen class-level constants.
ObjectDefineProperties(EventEmitter, {
  kMaxEventTargetListeners: {
    __proto__: null,
    value: kMaxEventTargetListeners,
    enumerable: false,
    configurable: false,
    writable: false,
  },
  kMaxEventTargetListenersWarned: {
    __proto__: null,
    value: kMaxEventTargetListenersWarned,
    enumerable: false,
    configurable: false,
    writable: false,
  },
});

/**
 * Sets the max listener count, either globally (no targets given) or on each
 * of the supplied `EventEmitter`/`EventTarget` instances.
 * @param {number} n
 * @param {EventTarget[] | EventEmitter[]} [eventTargets]
 * @returns {void}
 */
EventEmitter.setMaxListeners = function (
  n = defaultMaxListeners,
  ...eventTargets
) {
  validateNumber(n, "setMaxListeners", 0);

  // No explicit targets: update the process-wide default and stop.
  if (eventTargets.length === 0) {
    defaultMaxListeners = n;
    return;
  }

  // Resolve the EventTarget brand check on first use only.
  if (isEventTarget === undefined)
    isEventTarget = require("internal/event_target").isEventTarget;

  for (let idx = 0; idx < eventTargets.length; idx++) {
    const candidate = eventTargets[idx];
    if (isEventTarget(candidate)) {
      candidate[kMaxEventTargetListeners] = n;
      // Re-arm the "too many listeners" warning for the new limit.
      candidate[kMaxEventTargetListenersWarned] = false;
    } else if (typeof candidate.setMaxListeners === "function") {
      candidate.setMaxListeners(n);
    } else {
      throw new ERR_INVALID_ARG_TYPE(
        "eventTargets",
        ["EventEmitter", "EventTarget"],
        candidate
      );
    }
  }
};

// If you're updating this function definition, please also update any
// re-definitions, such as the one in the Domain module (lib/domain.js).
EventEmitter.init = function (opts) {
  // A fresh instance (or one whose _events still comes from the prototype)
  // gets its own events map; otherwise preserve the existing map and mark
  // "shape mode" so removals keep the object shape stable (perf).
  if (
    this._events === undefined ||
    this._events === ObjectGetPrototypeOf(this)._events
  ) {
    this._events = { __proto__: null };
    this._eventsCount = 0;
    this[kShapeMode] = false;
  } else {
    this[kShapeMode] = true;
  }

  this._maxListeners ||= undefined;

  if (opts?.captureRejections) {
    validateBoolean(opts.captureRejections, "options.captureRejections");
    this[kCapture] = Boolean(opts.captureRejections);
  } else {
    // Assigning the kCapture property directly saves an expensive
    // prototype lookup in a very sensitive hot path.
    this[kCapture] = EventEmitter.prototype[kCapture];
  }
};

// If captureRejections is enabled and a listener returned a thenable,
// attach a rejection handler that reports through kRejection / 'error'.
function addCatch(that, promise, type, args) {
  if (!that[kCapture]) {
    return;
  }

  // Handle Promises/A+ spec, then could be a getter
  // that throws on second use.
  try {
    const then = promise.then;

    if (typeof then === "function") {
      then.call(promise, undefined, function (err) {
        // The callback is called with nextTick to avoid a follow-up
        // rejection from this promise.
        process.nextTick(emitUnhandledRejectionOrErr, that, err, type, args);
      });
    }
  } catch (err) {
    that.emit("error", err);
  }
}

// Dispatches a captured rejection: prefer the emitter's kRejection hook,
// otherwise fall back to emitting 'error' with capture disabled.
function emitUnhandledRejectionOrErr(ee, err, type, args) {
  if (typeof ee[kRejection] === "function") {
    ee[kRejection](err, type, ...args);
  } else {
    // We have to disable the capture rejections mechanism, otherwise
    // we might end up in an infinite loop.
    const prev = ee[kCapture];

    // If the error handler throws, it is not catchable and it
    // will end up in 'uncaughtException'. We restore the previous
    // value of kCapture in case the uncaughtException is present
    // and the exception is handled.
    try {
      ee[kCapture] = false;
      ee.emit("error", err);
    } finally {
      ee[kCapture] = prev;
    }
  }
}

/**
 * Sets the max listeners of the event emitter.
 * @param {number} n
 * @returns {EventEmitter}
 */
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
  validateNumber(n, "setMaxListeners", 0);
  this._maxListeners = n;
  return this;
};

// Instance limit if set, otherwise the process-wide default.
function _getMaxListeners(that) {
  if (that._maxListeners === undefined) return EventEmitter.defaultMaxListeners;
  return that._maxListeners;
}

/**
 * Returns the current max listener value for the event emitter.
 * @returns {number}
 */
EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
  return _getMaxListeners(this);
};

// Appends an "Emitted 'error' event at:" section to `err.stack`, collapsing
// the frames that are identical to the capture site's stack.
function enhanceStackTrace(err, own) {
  let ctorInfo = "";
  try {
    const { name } = this.constructor;
    if (name !== "EventEmitter") ctorInfo = ` on ${name} instance`;
  } catch {
    // Continue regardless of error.
  }
  const sep = `\nEmitted 'error' event${ctorInfo} at:\n`;

  const errStack = ArrayPrototypeSlice(
    StringPrototypeSplit(err.stack, "\n"),
    1
  );
  const ownStack = ArrayPrototypeSlice(
    StringPrototypeSplit(own.stack, "\n"),
    1
  );

  const { len, offset } = identicalSequenceRange(ownStack, errStack);
  if (len > 0) {
    ArrayPrototypeSplice(
      ownStack,
      offset + 1,
      len - 2,
      " [... lines matching original stack trace ...]"
    );
  }

  return err.stack + sep + ArrayPrototypeJoin(ownStack, "\n");
}

/**
 * Synchronously calls each of the listeners registered
 * for the event.
 * @param {string | symbol} type
 * @param {...any} [args]
 * @returns {boolean}
 */
EventEmitter.prototype.emit = function emit(type, ...args) {
  let doError = type === "error";

  const events = this._events;
  if (events !== undefined) {
    // errorMonitor listeners observe 'error' events without consuming them.
    if (doError && events[kErrorMonitor] !== undefined)
      this.emit(kErrorMonitor, ...args);
    // Only treat 'error' as unhandled when no 'error' listener exists.
    doError &&= events.error === undefined;
  } else if (!doError) return false;

  // If there is no 'error' event listener then throw.
  if (doError) {
    let er;
    if (args.length > 0) er = args[0];
    if (er instanceof Error) {
      try {
        const capture = {};
        ErrorCaptureStackTrace(capture, EventEmitter.prototype.emit);
        ObjectDefineProperty(er, kEnhanceStackBeforeInspector, {
          __proto__: null,
          value: FunctionPrototypeBind(enhanceStackTrace, this, er, capture),
          configurable: true,
        });
      } catch {
        // Continue regardless of error.
      }

      // Note: The comments on the `throw` lines are intentional, they show
      // up in Node's output if this results in an unhandled exception.
      throw er; // Unhandled 'error' event
    }

    let stringifiedEr;
    try {
      stringifiedEr = inspect(er);
    } catch {
      stringifiedEr = er;
    }

    // At least give some kind of context to the user
    const err = new ERR_UNHANDLED_ERROR(stringifiedEr);
    err.context = er;
    throw err; // Unhandled 'error' event
  }

  const handler = events[type];

  if (handler === undefined) return false;

  if (typeof handler === "function") {
    const result = ReflectApply(handler, this, args);

    // We check if result is undefined first because that
    // is the most common case so we do not pay any perf
    // penalty
    if (result !== undefined && result !== null) {
      addCatch(this, result, type, args);
    }
  } else {
    // Multiple listeners: snapshot the array so listeners added/removed
    // during emit don't affect this dispatch.
    const len = handler.length;
    const listeners = arrayClone(handler);
    for (let i = 0; i < len; ++i) {
      const result = ReflectApply(listeners[i], this, args);

      // We check if result is undefined first because that
      // is the most common case so we do not pay any perf
      // penalty.
      // This code is duplicated because extracting it away
      // would make it non-inlineable.
      if (result !== undefined && result !== null) {
        addCatch(this, result, type, args);
      }
    }
  }

  return true;
};

// Shared implementation for addListener()/prependListener().
function _addListener(target, type, listener, prepend) {
  let m;
  let events;
  let existing;

  checkListener(listener);

  events = target._events;
  if (events === undefined) {
    events = target._events = { __proto__: null };
    target._eventsCount = 0;
  } else {
    // To avoid recursion in the case that type === "newListener"! Before
    // adding it to the listeners, first emit "newListener".
    if (events.newListener !== undefined) {
      target.emit("newListener", type, listener.listener ?? listener);

      // Re-assign `events` because a newListener handler could have caused the
      // this._events to be assigned to a new object
      events = target._events;
    }
    existing = events[type];
  }

  if (existing === undefined) {
    // Optimize the case of one listener. Don't need the extra array object.
    events[type] = listener;
    ++target._eventsCount;
  } else {
    if (typeof existing === "function") {
      // Adding the second element, need to change to array.
      existing = events[type] = prepend
        ? [listener, existing]
        : [existing, listener];
      // If we've already got an array, just append.
    } else if (prepend) {
      existing.unshift(listener);
    } else {
      existing.push(listener);
    }

    // Check for listener leak
    m = _getMaxListeners(target);
    if (m > 0 && existing.length > m && !existing.warned) {
      existing.warned = true;
      // No error code for this since it is a Warning
      const w = genericNodeError(
        `Possible EventEmitter memory leak detected. ${
          existing.length
        } ${String(type)} listeners ` +
          `added to ${inspect(target, {
            depth: -1,
          })}. MaxListeners is ${m}. Use emitter.setMaxListeners() to increase limit`,
        {
          name: "MaxListenersExceededWarning",
          emitter: target,
          type: type,
          count: existing.length,
        }
      );
      process.emitWarning(w);
    }
  }

  return target;
}

/**
 * Adds a listener to the event emitter.
+ * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +/** + * Adds the `listener` function to the beginning of + * the listeners array. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.prependListener = function prependListener( + type, + listener +) { + return _addListener(this, type, listener, true); +}; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) return this.listener.call(this.target); + return ReflectApply(this.listener, this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + const state = { fired: false, wrapFn: undefined, target, type, listener }; + const wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +/** + * Adds a one-time `listener` function to the event emitter. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +/** + * Adds a one-time `listener` function to the beginning of + * the listeners array. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.prependOnceListener = function prependOnceListener( + type, + listener +) { + checkListener(listener); + + this.prependListener(type, _onceWrap(this, type, listener)); + return this; +}; + +/** + * Removes the specified `listener` from the listeners array. 
 * @param {string | symbol} type
 * @param {Function} listener
 * @returns {EventEmitter}
 */
EventEmitter.prototype.removeListener = function removeListener(
  type,
  listener
) {
  checkListener(listener);

  const events = this._events;
  if (events === undefined) return this;

  const list = events[type];
  if (list === undefined) return this;

  // Single-listener slot: matches either the listener itself or a once()
  // wrapper around it.
  if (list === listener || list.listener === listener) {
    this._eventsCount -= 1;

    if (this[kShapeMode]) {
      // Keep the events object's shape stable for V8 by assigning
      // undefined instead of deleting the key.
      events[type] = undefined;
    } else if (this._eventsCount === 0) {
      this._events = { __proto__: null };
    } else {
      delete events[type];
      // NOTE(review): 'removeListener' is only emitted on this branch for the
      // single-listener case — matches upstream behavior; do not "fix".
      if (events.removeListener)
        this.emit("removeListener", type, list.listener || listener);
    }
  } else if (typeof list !== "function") {
    let position = -1;

    // Scan backwards so the most recently added match is removed.
    for (let i = list.length - 1; i >= 0; i--) {
      if (list[i] === listener || list[i].listener === listener) {
        position = i;
        break;
      }
    }

    if (position < 0) return this;

    if (position === 0) list.shift();
    else {
      spliceOne(list, position);
    }

    // Collapse a one-element array back to the bare-function fast path.
    if (list.length === 1) events[type] = list[0];

    if (events.removeListener !== undefined)
      this.emit("removeListener", type, listener);
  }

  return this;
};

EventEmitter.prototype.off = EventEmitter.prototype.removeListener;

/**
 * Removes all listeners from the event emitter. (Only
 * removes listeners for a specific event name if specified
 * as `type`).
 * @param {string | symbol} [type]
 * @returns {EventEmitter}
 */
EventEmitter.prototype.removeAllListeners = function removeAllListeners(type) {
  const events = this._events;
  if (events === undefined) return this;

  // Not listening for removeListener, no need to emit
  if (events.removeListener === undefined) {
    if (arguments.length === 0) {
      this._events = { __proto__: null };
      this._eventsCount = 0;
    } else if (events[type] !== undefined) {
      if (--this._eventsCount === 0) this._events = { __proto__: null };
      else delete events[type];
    }
    this[kShapeMode] = false;
    return this;
  }

  // Emit removeListener for all listeners on all events
  if (arguments.length === 0) {
    // 'removeListener' handlers are removed last so they observe every
    // other removal first.
    for (const key of ReflectOwnKeys(events)) {
      if (key === "removeListener") continue;
      this.removeAllListeners(key);
    }
    this.removeAllListeners("removeListener");
    this._events = { __proto__: null };
    this._eventsCount = 0;
    this[kShapeMode] = false;
    return this;
  }

  const listeners = events[type];

  if (typeof listeners === "function") {
    this.removeListener(type, listeners);
  } else if (listeners !== undefined) {
    // LIFO order
    for (let i = listeners.length - 1; i >= 0; i--) {
      this.removeListener(type, listeners[i]);
    }
  }

  return this;
};

// Shared implementation for listeners()/rawListeners(); `unwrap` controls
// whether once() wrappers are replaced by their original listeners.
function _listeners(target, type, unwrap) {
  const events = target._events;

  if (events === undefined) return [];

  const evlistener = events[type];
  if (evlistener === undefined) return [];

  if (typeof evlistener === "function")
    return unwrap ? [evlistener.listener || evlistener] : [evlistener];

  return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener);
}

/**
 * Returns a copy of the array of listeners for the event name
 * specified as `type`.
+ * @param {string | symbol} type + * @returns {Function[]} + */ +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +/** + * Returns a copy of the array of listeners and wrappers for + * the event name specified as `type`. + * @param {string | symbol} type + * @returns {Function[]} + */ +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +/** + * Returns the number of listeners listening to event name + * specified as `type`. + * @param {string | symbol} type + * @param {Function} [listener] + * @returns {number} + */ +EventEmitter.prototype.listenerCount = function listenerCount(type, listener) { + const events = this._events; + + if (events !== undefined) { + const evlistener = events[type]; + + if (typeof evlistener === "function") { + if (listener != null) { + return listener === evlistener || listener === evlistener.listener + ? 1 + : 0; + } + + return 1; + } else if (evlistener !== undefined) { + if (listener != null) { + let matching = 0; + + for (let i = 0, l = evlistener.length; i < l; i++) { + if ( + evlistener[i] === listener || + evlistener[i].listener === listener + ) { + matching++; + } + } + + return matching; + } + + return evlistener.length; + } + } + + return 0; +}; + +/** + * Returns an array listing the events for which + * the emitter has registered listeners. + * @returns {(string | symbol)[]} + */ +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? 
ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr) { + // At least since V8 8.3, this implementation is faster than the previous + // which always used a simple for-loop + switch (arr.length) { + case 2: + return [arr[0], arr[1]]; + case 3: + return [arr[0], arr[1], arr[2]]; + case 4: + return [arr[0], arr[1], arr[2], arr[3]]; + case 5: + return [arr[0], arr[1], arr[2], arr[3], arr[4]]; + case 6: + return [arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]]; + } + return ArrayPrototypeSlice(arr); +} + +function unwrapListeners(arr) { + const ret = arrayClone(arr); + for (let i = 0; i < ret.length; ++i) { + const orig = ret[i].listener; + if (typeof orig === "function") ret[i] = orig; + } + return ret; +} + +/** + * Returns a copy of the array of listeners for the event name + * specified as `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {Function[]} + */ +function getEventListeners(emitterOrTarget, type) { + // First check if EventEmitter + if (typeof emitterOrTarget.listeners === "function") { + return emitterOrTarget.listeners(type); + } + // Require event target lazily to avoid always loading it + const { isEventTarget, kEvents } = require("internal/event_target"); + if (isEventTarget(emitterOrTarget)) { + const root = emitterOrTarget[kEvents].get(type); + const listeners = []; + let handler = root?.next; + while (handler?.listener !== undefined) { + const listener = handler.listener?.deref + ? handler.listener.deref() + : handler.listener; + listeners.push(listener); + handler = handler.next; + } + return listeners; + } + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Returns the max listeners set. 
+ * @param {EventEmitter | EventTarget} emitterOrTarget + * @returns {number} + */ +function getMaxListeners(emitterOrTarget) { + if (typeof emitterOrTarget?.getMaxListeners === "function") { + return _getMaxListeners(emitterOrTarget); + } else if (typeof emitterOrTarget?.[kMaxEventTargetListeners] === "number") { + return emitterOrTarget[kMaxEventTargetListeners]; + } + + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Returns the number of registered listeners for `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {number} + */ +function listenerCount(emitterOrTarget, type) { + if (typeof emitterOrTarget.listenerCount === "function") { + return emitterOrTarget.listenerCount(type); + } + const { isEventTarget, kEvents } = require("internal/event_target"); + if (isEventTarget(emitterOrTarget)) { + return emitterOrTarget[kEvents].get(type)?.size ?? 0; + } + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Creates a `Promise` that is fulfilled when the emitter + * emits the given event. 
/**
 * Creates a `Promise` that is fulfilled when the emitter emits the given
 * event (resolving with the array of emitted arguments), or rejected on an
 * `'error'` event or abort of `options.signal`.
 * @param {EventEmitter} emitter
 * @param {string | symbol} name
 * @param {{ signal: AbortSignal; }} [options]
 * @returns {Promise}
 */
async function once(emitter, name, options = kEmptyObject) {
  validateObject(options, "options");
  const { signal } = options;
  validateAbortSignal(signal, "options.signal");
  if (signal?.aborted)
    throw new AbortError(undefined, { cause: signal.reason });
  return new Promise((resolve, reject) => {
    // Rejects the promise and detaches the event/abort listeners.
    const onError = (err) => {
      emitter.removeListener(name, onEvent);
      if (signal != null) {
        eventTargetAgnosticRemoveListener(signal, "abort", onAbort);
      }
      reject(err);
    };
    // Resolves with the emitted arguments and detaches the error/abort listeners.
    const onEvent = (...args) => {
      if (typeof emitter.removeListener === "function") {
        emitter.removeListener("error", onError);
      }
      if (signal != null) {
        eventTargetAgnosticRemoveListener(signal, "abort", onAbort);
      }
      resolve(args);
    };

    kResistStopPropagation ??=
      require("internal/event_target").kResistStopPropagation;
    const listenerOpts = {
      __proto__: null,
      once: true,
      [kResistStopPropagation]: true,
    };
    eventTargetAgnosticAddListener(emitter, name, onEvent, listenerOpts);
    if (name !== "error" && typeof emitter.once === "function") {
      // EventTarget does not have `error` event semantics like Node
      // EventEmitters, we listen to `error` events only on EventEmitters.
      emitter.once("error", onError);
    }
    function onAbort() {
      eventTargetAgnosticRemoveListener(emitter, name, onEvent);
      eventTargetAgnosticRemoveListener(emitter, "error", onError);
      reject(new AbortError(undefined, { cause: signal?.reason }));
    }
    if (signal != null) {
      eventTargetAgnosticAddListener(signal, "abort", onAbort, {
        __proto__: null,
        once: true,
        [kResistStopPropagation]: true,
      });
    }
  });
}

// Builds an async-iterator result record.
function createIterResult(value, done) {
  return { value, done };
}

// Detaches `listener` from either an EventEmitter or an EventTarget.
function eventTargetAgnosticRemoveListener(emitter, name, listener, flags) {
  if (typeof emitter.removeListener === "function") {
    emitter.removeListener(name, listener);
  } else if (typeof emitter.removeEventListener === "function") {
    emitter.removeEventListener(name, listener, flags);
  } else {
    throw new ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter);
  }
}

// Attaches `listener` to either an EventEmitter (honoring flags.once via
// once()/on()) or an EventTarget (passing flags straight through).
function eventTargetAgnosticAddListener(emitter, name, listener, flags) {
  if (typeof emitter.on === "function") {
    if (flags?.once) {
      emitter.once(name, listener);
    } else {
      emitter.on(name, listener);
    }
  } else if (typeof emitter.addEventListener === "function") {
    emitter.addEventListener(name, listener, flags);
  } else {
    throw new ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter);
  }
}

/** Returns an `AsyncIterator` that iterates `event` events. */
+ * @param {EventEmitter} emitter + * @param {string | symbol} event + * @param {{ + * signal: AbortSignal; + * close?: string[]; + * highWaterMark?: number, + * lowWaterMark?: number + * }} [options] + * @returns {AsyncIterator} + */ +function on(emitter, event, options = kEmptyObject) { + // Parameters validation + validateObject(options, "options"); + const signal = options.signal; + validateAbortSignal(signal, "options.signal"); + if (signal?.aborted) + throw new AbortError(undefined, { cause: signal.reason }); + // Support both highWaterMark and highWatermark for backward compatibility + const highWatermark = + options.highWaterMark ?? options.highWatermark ?? NumberMAX_SAFE_INTEGER; + validateInteger(highWatermark, "options.highWaterMark", 1); + // Support both lowWaterMark and lowWatermark for backward compatibility + const lowWatermark = options.lowWaterMark ?? options.lowWatermark ?? 1; + validateInteger(lowWatermark, "options.lowWaterMark", 1); + + // Preparing controlling queues and variables + FixedQueue ??= require("internal/fixed_queue"); + const unconsumedEvents = new FixedQueue(); + const unconsumedPromises = new FixedQueue(); + let paused = false; + let error = null; + let finished = false; + let size = 0; + + const iterator = ObjectSetPrototypeOf( + { + next() { + // First, we consume all unread events + if (size) { + const value = unconsumedEvents.shift(); + size--; + if (paused && size < lowWatermark) { + emitter.resume(); + paused = false; + } + return PromiseResolve(createIterResult(value, false)); + } + + // Then we error, if an error happened + // This happens one time if at all, because after 'error' + // we stop listening + if (error) { + const p = PromiseReject(error); + // Only the first element errors + error = null; + return p; + } + + // If the iterator is finished, resolve to done + if (finished) return closeHandler(); + + // Wait until an event happens + return new Promise(function (resolve, reject) { + unconsumedPromises.push({ 
resolve, reject }); + }); + }, + + return() { + return closeHandler(); + }, + + throw(err) { + if (!err || !(err instanceof Error)) { + throw new ERR_INVALID_ARG_TYPE( + "EventEmitter.AsyncIterator", + "Error", + err + ); + } + errorHandler(err); + }, + [SymbolAsyncIterator]() { + return this; + }, + [kWatermarkData]: { + /** + * The current queue size + * @returns {number} + */ + get size() { + return size; + }, + /** + * The low watermark. The emitter is resumed every time size is lower than it + * @returns {number} + */ + get low() { + return lowWatermark; + }, + /** + * The high watermark. The emitter is paused every time size is higher than it + * @returns {number} + */ + get high() { + return highWatermark; + }, + /** + * It checks whether the emitter is paused by the watermark controller or not + * @returns {boolean} + */ + get isPaused() { + return paused; + }, + }, + }, + AsyncIteratorPrototype + ); + + // Adding event handlers + const { addEventListener, removeAll } = listenersController(); + kFirstEventParam ??= require("internal/events/symbols").kFirstEventParam; + addEventListener( + emitter, + event, + options[kFirstEventParam] + ? eventHandler + : function (...args) { + return eventHandler(args); + } + ); + if (event !== "error" && typeof emitter.on === "function") { + addEventListener(emitter, "error", errorHandler); + } + const closeEvents = options?.close; + if (closeEvents?.length) { + for (let i = 0; i < closeEvents.length; i++) { + addEventListener(emitter, closeEvents[i], closeHandler); + } + } + + const abortListenerDisposable = signal + ? 
addAbortListener(signal, abortListener) + : null; + + return iterator; + + function abortListener() { + errorHandler(new AbortError(undefined, { cause: signal?.reason })); + } + + function eventHandler(value) { + if (unconsumedPromises.isEmpty()) { + size++; + if (!paused && size > highWatermark) { + paused = true; + emitter.pause(); + } + unconsumedEvents.push(value); + } else unconsumedPromises.shift().resolve(createIterResult(value, false)); + } + + function errorHandler(err) { + if (unconsumedPromises.isEmpty()) error = err; + else unconsumedPromises.shift().reject(err); + + closeHandler(); + } + + function closeHandler() { + abortListenerDisposable?.[SymbolDispose](); + removeAll(); + finished = true; + const doneResult = createIterResult(undefined, true); + while (!unconsumedPromises.isEmpty()) { + unconsumedPromises.shift().resolve(doneResult); + } + + return PromiseResolve(doneResult); + } +} + +function listenersController() { + const listeners = []; + + return { + addEventListener(emitter, event, handler, flags) { + eventTargetAgnosticAddListener(emitter, event, handler, flags); + ArrayPrototypePush(listeners, [emitter, event, handler, flags]); + }, + removeAll() { + while (listeners.length > 0) { + ReflectApply( + eventTargetAgnosticRemoveListener, + undefined, + ArrayPrototypePop(listeners) + ); + } + }, + }; +} diff --git a/node/fs.js b/node/fs.js new file mode 100644 index 00000000..c505db73 --- /dev/null +++ b/node/fs.js @@ -0,0 +1,3397 @@ +"use strict"; + +const { + ArrayFromAsync, + ArrayPrototypePush, + BigIntPrototypeToString, + Boolean, + FunctionPrototypeCall, + MathMax, + Number, + ObjectDefineProperties, + ObjectDefineProperty, + Promise, + PromisePrototypeThen, + PromiseResolve, + ReflectApply, + SafeMap, + SafeSet, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeSlice, + SymbolDispose, + uncurryThis, +} = primordials; + +const { fs: constants } = internalBinding("constants"); +const { + S_IFIFO, + S_IFLNK, + S_IFMT, 
+ S_IFREG, + S_IFSOCK, + F_OK, + O_WRONLY, + O_SYMLINK, +} = constants; + +const pathModule = require("path"); +const { isArrayBufferView } = require("internal/util/types"); + +const binding = internalBinding("fs"); + +const { createBlobFromFilePath } = require("internal/blob"); + +const { Buffer } = require("buffer"); +const { isBuffer: BufferIsBuffer } = Buffer; +const BufferToString = uncurryThis(Buffer.prototype.toString); +const { + AbortError, + aggregateTwoErrors, + codes: { ERR_ACCESS_DENIED, ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE }, +} = require("internal/errors"); + +const { FSReqCallback, statValues } = binding; +const { toPathIfFileURL } = require("internal/url"); +const { + customPromisifyArgs: kCustomPromisifyArgsSymbol, + getLazy, + kEmptyObject, + promisify: { custom: kCustomPromisifiedSymbol }, + SideEffectFreeRegExpPrototypeExec, + defineLazyProperties, + isWindows, + isMacOS, +} = require("internal/util"); +const { + constants: { kIoMaxLength, kMaxUserId }, + copyObject, + Dirent, + getDirent, + getDirents, + getOptions, + getValidatedFd, + getValidatedPath, + handleErrorFromBinding, + preprocessSymlinkDestination, + Stats, + getStatFsFromBinding, + getStatsFromBinding, + realpathCacheKey, + stringToFlags, + stringToSymlinkType, + toUnixTimestamp, + validateBufferArray, + validateCpOptions, + validateOffsetLengthRead, + validateOffsetLengthWrite, + validatePath, + validatePosition, + validateRmOptions, + validateRmOptionsSync, + validateRmdirOptions, + validateStringAfterArrayBufferView, + warnOnNonPortableTemplate, +} = require("internal/fs/utils"); +const { + CHAR_FORWARD_SLASH, + CHAR_BACKWARD_SLASH, +} = require("internal/constants"); +const { + isInt32, + parseFileMode, + validateBoolean, + validateBuffer, + validateEncoding, + validateFunction, + validateInteger, + validateObject, + validateOneOf, + validateString, + kValidateObjectAllowNullable, +} = require("internal/validators"); + +const permission = 
require("internal/process/permission"); + +let fs; + +// Lazy loaded +let cpFn; +let cpSyncFn; +let promises = null; +let ReadStream; +let WriteStream; +let rimraf; +let kResistStopPropagation; +let ReadFileContext; + +// These have to be separate because of how graceful-fs happens to do it's +// monkeypatching. +let FileReadStream; +let FileWriteStream; +let Utf8Stream; + +function lazyLoadUtf8Stream() { + Utf8Stream ??= require("internal/streams/fast-utf8-stream"); +} + +// Ensure that callbacks run in the global context. Only use this function +// for callbacks that are passed to the binding layer, callbacks that are +// invoked from JS already run in the proper scope. +function makeCallback(cb) { + validateFunction(cb, "cb"); + + return (...args) => ReflectApply(cb, this, args); +} + +// Special case of `makeCallback()` that is specific to async `*stat()` calls as +// an optimization, since the data passed back to the callback needs to be +// transformed anyway. +function makeStatsCallback(cb) { + validateFunction(cb, "cb"); + + return (err, stats) => { + if (err) return cb(err); + cb(err, getStatsFromBinding(stats)); + }; +} + +const isFd = isInt32; + +function isFileType(stats, fileType) { + // Use stats array directly to avoid creating an fs.Stats instance just for + // our internal use. + let mode = stats[1]; + if (typeof mode === "bigint") mode = Number(mode); + return (mode & S_IFMT) === fileType; +} + +/** + * Tests a user's permissions for the file or directory + * specified by `path`. 
/**
 * @param {string | Buffer | URL} path
 * @param {number} [mode]
 * @param {(err?: Error) => any} callback
 * @returns {void}
 */
function access(path, mode, callback) {
  // access(path, callback) form: mode defaults to F_OK.
  if (typeof mode === "function") {
    callback = mode;
    mode = F_OK;
  }

  path = getValidatedPath(path);
  callback = makeCallback(callback);

  const req = new FSReqCallback();
  req.oncomplete = callback;
  binding.access(path, mode, req);
}

/**
 * Synchronously tests a user's permissions for the file or
 * directory specified by `path`.
 * @param {string | Buffer | URL} path
 * @param {number} [mode]
 * @returns {void}
 */
function accessSync(path, mode) {
  binding.access(getValidatedPath(path), mode);
}

/**
 * Tests whether or not the given path exists.
 * @param {string | Buffer | URL} path
 * @param {(exists?: boolean) => any} callback
 * @returns {void}
 */
function exists(path, callback) {
  validateFunction(callback, "cb");

  // Collapse any access() error into a boolean; exists() never reports errors.
  function suppressedCallback(err) {
    callback(!err);
  }

  try {
    fs.access(path, F_OK, suppressedCallback);
  } catch {
    return callback(false);
  }
}

// util.promisify(fs.exists) resolves with a boolean rather than rejecting.
ObjectDefineProperty(exists, kCustomPromisifiedSymbol, {
  __proto__: null,
  value: function exists(path) {
    // eslint-disable-line func-name-matching
    return new Promise((resolve) => fs.exists(path, resolve));
  },
});

// Emit the DEP0187 warning at most once per process.
let showExistsDeprecation = true;
/** Synchronously tests whether or not the given path exists. */
/**
 * @param {string | Buffer | URL} path
 * @returns {boolean}
 */
function existsSync(path) {
  try {
    path = getValidatedPath(path);
  } catch (err) {
    // Invalid argument types historically returned false; warn once (DEP0187)
    // instead of throwing.
    if (showExistsDeprecation && err?.code === "ERR_INVALID_ARG_TYPE") {
      process.emitWarning(
        "Passing invalid argument types to fs.existsSync is deprecated",
        "DeprecationWarning",
        "DEP0187"
      );
      showExistsDeprecation = false;
    }
    return false;
  }

  return binding.existsSync(path);
}

// fs.readFile() continuation: runs once the fd has been opened, then kicks
// off an fstat() to size the read.
function readFileAfterOpen(err, fd) {
  const context = this.context;

  if (err) {
    context.callback(err);
    return;
  }

  context.fd = fd;

  const req = new FSReqCallback();
  req.oncomplete = readFileAfterStat;
  req.context = context;
  binding.fstat(fd, false, req);
}

// fs.readFile() continuation: allocates the destination buffer from the
// fstat() result and starts reading.
function readFileAfterStat(err, stats) {
  const context = this.context;

  if (err) return context.close(err);

  // TODO(BridgeAR): Check if allocating a smaller chunk is better performance
  // wise, similar to the promise based version (less peak memory and chunked
  // stringify operations vs multiple C++/JS boundary crossings).
  const size = (context.size = isFileType(stats, S_IFREG) ? stats[8] : 0);

  if (size > kIoMaxLength) {
    err = new ERR_FS_FILE_TOO_LARGE(size);
    return context.close(err);
  }

  try {
    if (size === 0) {
      // TODO(BridgeAR): If an encoding is set, use the StringDecoder to concat
      // the result and reuse the buffer instead of allocating a new one.
      context.buffers = [];
    } else {
      context.buffer = Buffer.allocUnsafeSlow(size);
    }
  } catch (err) {
    return context.close(err);
  }
  context.read();
}

// Reports whether `signal` is already aborted; if so, delivers an AbortError
// to `callback` and returns true so the caller can bail out.
function checkAborted(signal, callback) {
  if (signal?.aborted) {
    callback(new AbortError(undefined, { cause: signal.reason }));
    return true;
  }
  return false;
}

/** Asynchronously reads the entire contents of a file. */
+ * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * signal?: AbortSignal; + * } | string} [options] + * @param {( + * err?: Error, + * data?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readFile(path, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { flag: "r" }); + ReadFileContext ??= require("internal/fs/read/context"); + const context = new ReadFileContext(callback, options.encoding); + context.isUserFd = isFd(path); // File descriptor ownership + + if (options.signal) { + context.signal = options.signal; + } + if (context.isUserFd) { + process.nextTick(function tick(context) { + FunctionPrototypeCall(readFileAfterOpen, { context }, null, path); + }, context); + return; + } + + if (checkAborted(options.signal, callback)) return; + + const flagsNumber = stringToFlags(options.flag, "options.flag"); + const req = new FSReqCallback(); + req.context = context; + req.oncomplete = readFileAfterOpen; + binding.open(getValidatedPath(path), flagsNumber, 0o666, req); +} + +function tryStatSync(fd, isUserFd) { + const stats = binding.fstat(fd, false, undefined, true /* shouldNotThrow */); + if (stats === undefined && !isUserFd) { + fs.closeSync(fd); + } + return stats; +} + +function tryCreateBuffer(size, fd, isUserFd) { + let threw = true; + let buffer; + try { + if (size > kIoMaxLength) { + throw new ERR_FS_FILE_TOO_LARGE(size); + } + buffer = Buffer.allocUnsafe(size); + threw = false; + } finally { + if (threw && !isUserFd) fs.closeSync(fd); + } + return buffer; +} + +function tryReadSync(fd, isUserFd, buffer, pos, len) { + let threw = true; + let bytesRead; + try { + bytesRead = fs.readSync(fd, buffer, pos, len); + threw = false; + } finally { + if (threw && !isUserFd) fs.closeSync(fd); + } + return bytesRead; +} + +/** + * Synchronously reads the entire contents of a file. 
+ * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * }} [options] + * @returns {string | Buffer} + */ +function readFileSync(path, options) { + options = getOptions(options, { flag: "r" }); + + if (options.encoding === "utf8" || options.encoding === "utf-8") { + if (!isInt32(path)) { + path = getValidatedPath(path); + } + return binding.readFileUtf8(path, stringToFlags(options.flag)); + } + + const isUserFd = isFd(path); // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, options.flag, 0o666); + + const stats = tryStatSync(fd, isUserFd); + const size = isFileType(stats, S_IFREG) ? stats[8] : 0; + let pos = 0; + let buffer; // Single buffer with file data + let buffers; // List for when size is unknown + + if (size === 0) { + buffers = []; + } else { + buffer = tryCreateBuffer(size, fd, isUserFd); + } + + let bytesRead; + + if (size !== 0) { + do { + bytesRead = tryReadSync(fd, isUserFd, buffer, pos, size - pos); + pos += bytesRead; + } while (bytesRead !== 0 && pos < size); + } else { + do { + // The kernel lies about many files. + // Go ahead and try to read some bytes. + buffer = Buffer.allocUnsafe(8192); + bytesRead = tryReadSync(fd, isUserFd, buffer, 0, 8192); + if (bytesRead !== 0) { + ArrayPrototypePush(buffers, buffer.slice(0, bytesRead)); + } + pos += bytesRead; + } while (bytesRead !== 0); + } + + if (!isUserFd) fs.closeSync(fd); + + if (size === 0) { + // Data was collected into the buffers list. + buffer = Buffer.concat(buffers, pos); + } else if (pos < size) { + buffer = buffer.slice(0, pos); + } + + if (options.encoding) buffer = buffer.toString(options.encoding); + return buffer; +} + +function defaultCloseCallback(err) { + if (err != null) throw err; +} + +/** + * Closes the file descriptor. 
/**
 * Closes the file descriptor.
 * @param {number} fd
 * @param {(err?: Error) => any} [callback]
 * @returns {void}
 */
function close(fd, callback = defaultCloseCallback) {
  // Only wrap user-supplied callbacks; the default one needs no re-binding.
  if (callback !== defaultCloseCallback) callback = makeCallback(callback);

  const req = new FSReqCallback();
  req.oncomplete = callback;
  binding.close(fd, req);
}

/**
 * Synchronously closes the file descriptor.
 * @param {number} fd
 * @returns {void}
 */
function closeSync(fd) {
  binding.close(fd);
}

/**
 * Asynchronously opens a file.
 * @param {string | Buffer | URL} path
 * @param {string | number} [flags]
 * @param {string | number} [mode]
 * @param {(err?: Error, fd?: number) => any} callback
 * @returns {void}
 */
function open(path, flags, mode, callback) {
  path = getValidatedPath(path);
  if (arguments.length < 3) {
    // open(path, callback)
    callback = flags;
    flags = "r";
    mode = 0o666;
  } else if (typeof mode === "function") {
    // open(path, flags, callback)
    callback = mode;
    mode = 0o666;
  } else {
    mode = parseFileMode(mode, "mode", 0o666);
  }
  const flagsNumber = stringToFlags(flags);
  callback = makeCallback(callback);

  const req = new FSReqCallback();
  req.oncomplete = callback;

  binding.open(path, flagsNumber, mode, req);
}

/**
 * Synchronously opens a file.
 * @param {string | Buffer | URL} path
 * @param {string | number} [flags]
 * @param {string | number} [mode]
 * @returns {number}
 */
function openSync(path, flags, mode) {
  return binding.open(
    getValidatedPath(path),
    stringToFlags(flags),
    parseFileMode(mode, "mode", 0o666)
  );
}

/**
 * Returns a Blob backed by the file at `path`.
 * @param {string | Buffer | URL } path
 * @param {{
 *   type?: string;
 *   }} [options]
 * @returns {Promise}
 */
function openAsBlob(path, options = kEmptyObject) {
  validateObject(options, "options");
  const type = options.type || "";
  validateString(type, "options.type");
  // The underlying implementation here returns the Blob synchronously for now.
  // To give ourselves flexibility to maybe return the Blob asynchronously,
  // this API returns a Promise.
  path = getValidatedPath(path);
  return PromiseResolve(createBlobFromFilePath(path, { type }));
}

/**
 * Reads file from the specified `fd` (file descriptor).
 * @param {number} fd
 * @param {Buffer | TypedArray | DataView} buffer
 * @param {number | {
 *   offset?: number;
 *   length?: number;
 *   position?: number | bigint | null;
 *   }} [offsetOrOptions]
 * @param {number} length
 * @param {number | bigint | null} position
 * @param {(err?: Error, bytesRead?: number, buffer?: Buffer) => any} callback
 * @returns {void}
 */
function read(fd, buffer, offsetOrOptions, length, position, callback) {
  fd = getValidatedFd(fd);

  let offset = offsetOrOptions;
  let params = null;
  if (arguments.length <= 4) {
    if (arguments.length === 4) {
      // This is fs.read(fd, buffer, options, callback)
      validateObject(offsetOrOptions, "options", kValidateObjectAllowNullable);
      callback = length;
      params = offsetOrOptions;
    } else if (arguments.length === 3) {
      // This is fs.read(fd, bufferOrParams, callback)
      if (!isArrayBufferView(buffer)) {
        // This is fs.read(fd, params, callback)
        params = buffer;
        ({ buffer = Buffer.alloc(16384) } = params ?? kEmptyObject);
      }
      callback = offsetOrOptions;
    } else {
      // This is fs.read(fd, callback)
      callback = buffer;
      buffer = Buffer.alloc(16384);
    }

    if (params !== undefined) {
      validateObject(params, "options", kValidateObjectAllowNullable);
    }
    ({
      offset = 0,
      length = buffer?.byteLength - offset,
      position = null,
    } = params ?? kEmptyObject);
  }

  validateBuffer(buffer);
  validateFunction(callback, "cb");

  if (offset == null) {
    offset = 0;
  } else {
    validateInteger(offset, "offset", 0);
  }

  length |= 0;

  if (length === 0) {
    return process.nextTick(function tick() {
      callback(null, 0, buffer);
    });
  }

  if (buffer.byteLength === 0) {
    throw new ERR_INVALID_ARG_VALUE(
      "buffer",
      buffer,
      "is empty and cannot be written"
    );
  }

  validateOffsetLengthRead(offset, length, buffer.byteLength);

  if (position == null) {
    position = -1;
  } else {
    validatePosition(position, "position", length);
  }

  function wrapper(err, bytesRead) {
    // Retain a reference to buffer so that it can't be GC'ed too soon.
    callback(err, bytesRead || 0, buffer);
  }

  const req = new FSReqCallback();
  req.oncomplete = wrapper;

  binding.read(fd, buffer, offset, length, position, req);
}

ObjectDefineProperty(read, kCustomPromisifyArgsSymbol, {
  __proto__: null,
  value: ["bytesRead", "buffer"],
  enumerable: false,
});

/**
 * Synchronously reads the file from the
 * specified `fd` (file descriptor).
 * @param {number} fd
 * @param {Buffer | TypedArray | DataView} buffer
 * @param {number | {
 *   offset?: number;
 *   length?: number;
 *   position?: number | bigint | null;
 *   }} [offsetOrOptions]
 * @param {number} [length]
 * @param {number} [position]
 * @returns {number}
 */
function readSync(fd, buffer, offsetOrOptions, length, position) {
  fd = getValidatedFd(fd);

  validateBuffer(buffer);

  let offset = offsetOrOptions;
  if (arguments.length <= 3 || typeof offsetOrOptions === "object") {
    if (offsetOrOptions !== undefined) {
      validateObject(offsetOrOptions, "options", kValidateObjectAllowNullable);
    }

    ({
      offset = 0,
      length = buffer.byteLength - offset,
      position = null,
    } = offsetOrOptions ?? kEmptyObject);
  }

  // Fix: accept null as well as undefined for offset, matching the async
  // read() above. Previously this checked `offset === undefined`, so
  // readSync(fd, buf, { offset: null }) threw via validateInteger(null)
  // while read() accepted the same options object.
  if (offset == null) {
    offset = 0;
  } else {
    validateInteger(offset, "offset", 0);
  }

  length |= 0;

  if (length === 0) {
    return 0;
  }

  if (buffer.byteLength === 0) {
    throw new ERR_INVALID_ARG_VALUE(
      "buffer",
      buffer,
      "is empty and cannot be written"
    );
  }

  validateOffsetLengthRead(offset, length, buffer.byteLength);

  if (position == null) {
    position = -1;
  } else {
    validatePosition(position, "position", length);
  }

  return binding.read(fd, buffer, offset, length, position);
}

/**
 * Reads file from the specified `fd` (file descriptor)
 * and writes to an array of `ArrayBufferView`s.
 * @param {number} fd
 * @param {ArrayBufferView[]} buffers
 * @param {number | null} [position]
 * @param {(err?: Error, bytesRead?: number, buffers?: ArrayBufferView[]) => any} callback
 * @returns {void}
 */
function readv(fd, buffers, position, callback) {
  function wrapper(err, read) {
    // Retain a reference to buffers so they can't be GC'ed too soon.
    callback(err, read || 0, buffers);
  }

  fd = getValidatedFd(fd);
  validateBufferArray(buffers);
  callback ||= position;
  validateFunction(callback, "cb");

  const req = new FSReqCallback();
  req.oncomplete = wrapper;

  if (typeof position !== "number") position = null;

  binding.readBuffers(fd, buffers, position, req);
}

ObjectDefineProperty(readv, kCustomPromisifyArgsSymbol, {
  __proto__: null,
  value: ["bytesRead", "buffers"],
  enumerable: false,
});

/** Synchronously reads file from the specified `fd` (file descriptor) and writes to an array of `ArrayBufferView`s. */
/**
 * Synchronously reads file from the specified `fd` (file descriptor) and
 * writes to an array of `ArrayBufferView`s.
 * @param {number} fd
 * @param {ArrayBufferView[]} buffers
 * @param {number | null} [position]
 * @returns {number}
 */
function readvSync(fd, buffers, position) {
  fd = getValidatedFd(fd);
  validateBufferArray(buffers);

  if (typeof position !== "number") position = null;

  return binding.readBuffers(fd, buffers, position);
}

/**
 * Writes `buffer` to the specified `fd` (file descriptor).
 * @param {number} fd
 * @param {Buffer | TypedArray | DataView | string} buffer
 * @param {number | object} [offsetOrOptions]
 * @param {number} [length]
 * @param {number | null} [position]
 * @param {(err?: Error, bytesWritten?: number, buffer?: Buffer | TypedArray | DataView) => any} callback
 * @returns {void}
 */
function write(fd, buffer, offsetOrOptions, length, position, callback) {
  function wrapper(err, written) {
    // Retain a reference to buffer so that it can't be GC'ed too soon.
    callback(err, written || 0, buffer);
  }

  fd = getValidatedFd(fd);

  let offset = offsetOrOptions;
  if (isArrayBufferView(buffer)) {
    // Binary path: the callback may have been passed in any trailing slot.
    callback ||= position || length || offset;
    validateFunction(callback, "cb");

    if (typeof offset === "object") {
      ({
        offset = 0,
        length = buffer.byteLength - offset,
        position = null,
      } = offsetOrOptions ?? kEmptyObject);
    }

    if (offset == null || typeof offset === "function") {
      offset = 0;
    } else {
      validateInteger(offset, "offset", 0);
    }
    if (typeof length !== "number") length = buffer.byteLength - offset;
    if (typeof position !== "number") position = null;
    validateOffsetLengthWrite(offset, length, buffer.byteLength);

    const req = new FSReqCallback();
    req.oncomplete = wrapper;
    binding.writeBuffer(fd, buffer, offset, length, position, req);
    return;
  }

  // String path: write(fd, string[, position[, encoding]], callback).
  validateStringAfterArrayBufferView(buffer, "buffer");

  if (typeof position !== "function") {
    if (typeof offset === "function") {
      position = offset;
      offset = null;
    } else {
      position = length;
    }
    length = "utf8";
  }

  const str = buffer;
  validateEncoding(str, length);
  callback = position;
  validateFunction(callback, "cb");

  const req = new FSReqCallback();
  req.oncomplete = wrapper;
  binding.writeString(fd, str, offset, length, req);
}

ObjectDefineProperty(write, kCustomPromisifyArgsSymbol, {
  __proto__: null,
  value: ["bytesWritten", "buffer"],
  enumerable: false,
});

/**
 * Synchronously writes `buffer` to the specified `fd` (file descriptor).
 * @param {number} fd
 * @param {Buffer | TypedArray | DataView | string} buffer
 * @param {{
 *   offset?: number;
 *   length?: number;
 *   position?: number | null;
 *   }} [offsetOrOptions]
 * @param {number} [length]
 * @param {number} [position]
 * @returns {number}
 */
function writeSync(fd, buffer, offsetOrOptions, length, position) {
  fd = getValidatedFd(fd);
  const ctx = {};
  let result;

  let offset = offsetOrOptions;
  if (isArrayBufferView(buffer)) {
    if (typeof offset === "object") {
      ({
        offset = 0,
        length = buffer.byteLength - offset,
        position = null,
      } = offsetOrOptions ?? kEmptyObject);
    }
    if (position === undefined) position = null;
    if (offset == null) {
      offset = 0;
    } else {
      validateInteger(offset, "offset", 0);
    }
    if (typeof length !== "number") length = buffer.byteLength - offset;
    validateOffsetLengthWrite(offset, length, buffer.byteLength);
    result = binding.writeBuffer(
      fd,
      buffer,
      offset,
      length,
      position,
      undefined,
      ctx
    );
  } else {
    // String path: writeSync(fd, string[, position[, encoding]]).
    validateStringAfterArrayBufferView(buffer, "buffer");
    validateEncoding(buffer, length);

    if (offset === undefined) offset = null;
    result = binding.writeString(fd, buffer, offset, length, undefined, ctx);
  }
  handleErrorFromBinding(ctx);
  return result;
}

/**
 * Writes an array of `ArrayBufferView`s to the specified `fd`
 * (file descriptor).
 * @param {number} fd
 * @param {ArrayBufferView[]} buffers
 * @param {number | null} [position]
 * @param {(err?: Error, bytesWritten?: number, buffers?: ArrayBufferView[]) => any} callback
 * @returns {void}
 */
function writev(fd, buffers, position, callback) {
  function wrapper(err, written) {
    callback(err, written || 0, buffers);
  }

  fd = getValidatedFd(fd);
  validateBufferArray(buffers);
  callback ||= position;
  validateFunction(callback, "cb");

  // Nothing to write: complete asynchronously with zero bytes.
  if (buffers.length === 0) {
    process.nextTick(callback, null, 0, buffers);
    return;
  }

  const req = new FSReqCallback();
  req.oncomplete = wrapper;

  if (typeof position !== "number") position = null;

  binding.writeBuffers(fd, buffers, position, req);
}

ObjectDefineProperty(writev, kCustomPromisifyArgsSymbol, {
  __proto__: null,
  value: ["bytesWritten", "buffer"],
  enumerable: false,
});

/**
 * Synchronously writes an array of `ArrayBufferView`s to the specified `fd`
 * (file descriptor).
 * @param {number} fd
 * @param {ArrayBufferView[]} buffers
 * @param {number | null} [position]
 * @returns {number}
 */
function writevSync(fd, buffers, position) {
  fd = getValidatedFd(fd);
  validateBufferArray(buffers);

  if (buffers.length === 0) {
    return 0;
  }

  if (typeof position !== "number") position = null;

  return binding.writeBuffers(fd, buffers, position);
}

/**
 * Asynchronously renames file at `oldPath` to the pathname provided
 * as `newPath`.
 * @param {string | Buffer | URL} oldPath
 * @param {string | Buffer | URL} newPath
 * @param {(err?: Error) => any} callback
 * @returns {void}
 */
function rename(oldPath, newPath, callback) {
  callback = makeCallback(callback);
  const req = new FSReqCallback();
  req.oncomplete = callback;
  binding.rename(
    getValidatedPath(oldPath, "oldPath"),
    getValidatedPath(newPath, "newPath"),
    req
  );
}

/**
 * Synchronously renames file at `oldPath` to the pathname provided
 * as `newPath`.
 * @param {string | Buffer | URL} oldPath
 * @param {string | Buffer | URL} newPath
 * @returns {void}
 */
function renameSync(oldPath, newPath) {
  binding.rename(
    getValidatedPath(oldPath, "oldPath"),
    getValidatedPath(newPath, "newPath")
  );
}

/**
 * Truncates the file.
 * @param {string | Buffer | URL} path
 * @param {number} [len]
 * @param {(err?: Error) => any} callback
 * @returns {void}
 */
function truncate(path, len, callback) {
  if (typeof len === "function") {
    callback = len;
    len = 0;
  } else if (len === undefined) {
    len = 0;
  }

  validateInteger(len, "len");
  len = MathMax(0, len);
  validateFunction(callback, "cb");
  fs.open(path, "r+", (err, fd) => {
    if (err) return callback(err);
    const req = new FSReqCallback();
    req.oncomplete = function oncomplete(err) {
      // Always close the fd; report both errors if close fails too.
      fs.close(fd, (closeErr) => {
        callback(aggregateTwoErrors(closeErr, err));
      });
    };
    binding.ftruncate(fd, len, req);
  });
}

/** Synchronously truncates the file. */
+ * @param {string | Buffer | URL} path + * @param {number} [len] + * @returns {void} + */ +function truncateSync(path, len) { + if (len === undefined) { + len = 0; + } + // Allow error to be thrown, but still close fd. + const fd = fs.openSync(path, "r+"); + try { + fs.ftruncateSync(fd, len); + } finally { + fs.closeSync(fd); + } +} + +/** + * Truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function ftruncate(fd, len = 0, callback) { + if (typeof len === "function") { + callback = len; + len = 0; + } + validateInteger(len, "len"); + len = MathMax(0, len); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.ftruncate(fd, len, req); +} + +/** + * Synchronously truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @returns {void} + */ +function ftruncateSync(fd, len = 0) { + validateInteger(len, "len"); + binding.ftruncate(fd, len < 0 ? 0 : len); +} + +function lazyLoadCp() { + if (cpFn === undefined) { + ({ cpFn } = require("internal/fs/cp/cp")); + cpFn = require("util").callbackify(cpFn); + ({ cpSyncFn } = require("internal/fs/cp/cp-sync")); + } +} + +function lazyLoadRimraf() { + if (rimraf === undefined) ({ rimraf } = require("internal/fs/rimraf")); +} + +/** + * Asynchronously removes a directory. + * @param {string | Buffer | URL} path + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rmdir(path, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + + if (options?.recursive !== undefined) { + // This API previously accepted a `recursive` option that was deprecated + // and removed. However, in order to make the change more visible, we + // opted to throw an error if recursive is specified rather than removing it + // entirely. 
+ throw new ERR_INVALID_ARG_VALUE( + "options.recursive", + options.recursive, + "is no longer supported" + ); + } + + callback = makeCallback(callback); + path = getValidatedPath(path); + + validateRmdirOptions(options); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.rmdir(path, req); +} + +/** + * Synchronously removes a directory. + * @param {string | Buffer | URL} path + * @param {object} [options] + * @returns {void} + */ +function rmdirSync(path, options) { + path = getValidatedPath(path); + + if (options?.recursive !== undefined) { + throw new ERR_INVALID_ARG_VALUE( + "options.recursive", + options.recursive, + "is no longer supported" + ); + } + + validateRmdirOptions(options); + binding.rmdir(path); +} + +/** + * Asynchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rm(path, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + path = getValidatedPath(path); + + validateRmOptions(path, options, false, (err, options) => { + if (err) { + return callback(err); + } + lazyLoadRimraf(); + return rimraf(path, options, callback); + }); +} + +/** + * Synchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). 
+ * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @returns {void} + */ +function rmSync(path, options) { + const opts = validateRmOptionsSync(path, options, false); + return binding.rmSync( + getValidatedPath(path), + opts.maxRetries, + opts.recursive, + opts.retryDelay + ); +} + +/** + * Forces all currently queued I/O operations associated + * with the file to the operating system's synchronized + * I/O completion state. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fdatasync(fd, callback) { + const req = new FSReqCallback(); + req.oncomplete = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fdatasync API is disabled when Permission Model is enabled." + ) + ); + return; + } + binding.fdatasync(fd, req); +} + +/** + * Synchronously forces all currently queued I/O operations + * associated with the file to the operating + * system's synchronized I/O completion state. + * @param {number} fd + * @returns {void} + */ +function fdatasyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fdatasync API is disabled when Permission Model is enabled." + ); + } + binding.fdatasync(fd); +} + +/** + * Requests for all data for the open file descriptor + * to be flushed to the storage device. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fsync(fd, callback) { + const req = new FSReqCallback(); + req.oncomplete = makeCallback(callback); + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fsync API is disabled when Permission Model is enabled." + ) + ); + return; + } + binding.fsync(fd, req); +} + +/** + * Synchronously requests for all data for the open + * file descriptor to be flushed to the storage device. 
+ * @param {number} fd + * @returns {void} + */ +function fsyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fsync API is disabled when Permission Model is enabled." + ); + } + binding.fsync(fd); +} + +/** + * Asynchronously creates a directory. + * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function mkdir(path, options, callback) { + let mode = 0o777; + let recursive = false; + if (typeof options === "function") { + callback = options; + } else if (typeof options === "number" || typeof options === "string") { + mode = parseFileMode(options, "mode"); + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive; + validateBoolean(recursive, "options.recursive"); + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, "options.mode"); + } + } + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.mkdir(getValidatedPath(path), mode, recursive, req); +} + +/** + * Synchronously creates a directory. 
+ * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @returns {string | void} + */ +function mkdirSync(path, options) { + let mode = 0o777; + let recursive = false; + if (typeof options === "number" || typeof options === "string") { + mode = parseFileMode(options, "mode"); + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive; + validateBoolean(recursive, "options.recursive"); + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, "options.mode"); + } + } + + const result = binding.mkdir(getValidatedPath(path), mode, recursive); + + if (recursive) { + return result; + } +} + +/* + * An recursive algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdirRecursive(basePath, options, callback) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + }; + + let i = 0; + + function read(path) { + const req = new FSReqCallback(); + req.oncomplete = (err, result) => { + if (err) { + callback(err); + return; + } + + if (result === undefined) { + callback(null, context.readdirResults); + return; + } + + processReaddirResult({ + result, + currentPath: path, + context, + }); + + if (i < context.pathsQueue.length) { + read(context.pathsQueue[i++]); + } else { + callback(null, context.readdirResults); + } + }; + + binding.readdir(path, context.encoding, context.withFileTypes, req); + } + + read(context.pathsQueue[i++]); +} + +// Calling `readdir` with `withFileTypes=true`, the 
result is an array of arrays. +// The first array is the names, and the second array is the types. +// They are guaranteed to be the same length; hence, setting `length` to the length +// of the first array within the result. +const processReaddirResult = (args) => + args.context.withFileTypes ? handleDirents(args) : handleFilePaths(args); + +function handleDirents({ result, currentPath, context }) { + const { 0: names, 1: types } = result; + const { length } = names; + + for (let i = 0; i < length; i++) { + // Avoid excluding symlinks, as they are not directories. + // Refs: https://github.com/nodejs/node/issues/52663 + const fullPath = pathModule.join(currentPath, names[i]); + const dirent = getDirent(currentPath, names[i], types[i]); + ArrayPrototypePush(context.readdirResults, dirent); + + if (dirent.isDirectory() || binding.internalModuleStat(fullPath) === 1) { + ArrayPrototypePush(context.pathsQueue, fullPath); + } + } +} + +function handleFilePaths({ result, currentPath, context }) { + for (let i = 0; i < result.length; i++) { + const resultPath = pathModule.join(currentPath, result[i]); + const relativeResultPath = pathModule.relative( + context.basePath, + resultPath + ); + const stat = binding.internalModuleStat(resultPath); + ArrayPrototypePush(context.readdirResults, relativeResultPath); + + if (stat === 1) { + ArrayPrototypePush(context.pathsQueue, resultPath); + } + } +} + +/** + * An iterative algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. 
+ * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @returns {string[] | Dirent[]} + */ +function readdirSyncRecursive(basePath, options) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + }; + + function read(path) { + const readdirResult = binding.readdir( + path, + context.encoding, + context.withFileTypes + ); + + if (readdirResult === undefined) { + return; + } + + processReaddirResult({ + result: readdirResult, + currentPath: path, + context, + }); + } + + for (let i = 0; i < context.pathsQueue.length; i++) { + read(context.pathsQueue[i]); + } + + return context.readdirResults; +} + +/** + * Reads the contents of a directory. + * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdir(path, options, callback) { + callback = makeCallback(typeof options === "function" ? options : callback); + options = getOptions(options); + path = getValidatedPath(path); + if (options.recursive != null) { + validateBoolean(options.recursive, "options.recursive"); + } + + if (options.recursive) { + // Make shallow copy to prevent mutating options from affecting results + options = copyObject(options); + + readdirRecursive(path, options, callback); + return; + } + + const req = new FSReqCallback(); + if (!options.withFileTypes) { + req.oncomplete = callback; + } else { + req.oncomplete = (err, result) => { + if (err) { + callback(err); + return; + } + getDirents(path, result, callback); + }; + } + binding.readdir(path, options.encoding, !!options.withFileTypes, req); +} + +/** + * Synchronously reads the contents of a directory. 
+ * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @returns {string | Buffer[] | Dirent[]} + */ +function readdirSync(path, options) { + options = getOptions(options); + path = getValidatedPath(path); + if (options.recursive != null) { + validateBoolean(options.recursive, "options.recursive"); + } + + if (options.recursive) { + return readdirSyncRecursive(path, options); + } + + const result = binding.readdir( + path, + options.encoding, + !!options.withFileTypes + ); + + return result !== undefined && options.withFileTypes + ? getDirents(path, result) + : result; +} + +/** + * Invokes the callback with the `fs.Stats` + * for the file descriptor. + * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} [callback] + * @returns {void} + */ +function fstat(fd, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.fstat(fd, options.bigint, req); +} + +/** + * Retrieves the `fs.Stats` for the symbolic link + * referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function lstat(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + path = getValidatedPath(path); + if (permission.isEnabled() && !permission.has("fs.read", path)) { + const resource = BufferIsBuffer(path) ? 
BufferToString(path) : path; + callback( + new ERR_ACCESS_DENIED( + "Access to this API has been restricted", + "FileSystemRead", + resource + ) + ); + return; + } + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.lstat(path, options.bigint, req); +} + +/** + * Asynchronously gets the stats of a file. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function stat(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.stat(getValidatedPath(path), options.bigint, req); +} + +function statfs(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + validateFunction(callback, "cb"); + path = getValidatedPath(path); + const req = new FSReqCallback(options.bigint); + req.oncomplete = (err, stats) => { + if (err) { + return callback(err); + } + + callback(err, getStatFsFromBinding(stats)); + }; + binding.statfs(getValidatedPath(path), options.bigint, req); +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the file descriptor. + * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @returns {Stats | undefined} + */ +function fstatSync(fd, options = { bigint: false }) { + const stats = binding.fstat(fd, options.bigint, undefined, false); + if (stats === undefined) { + return; + } + return getStatsFromBinding(stats); +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the symbolic link referred to by the `path`. 
+ * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats | undefined} + */ +function lstatSync(path, options = { bigint: false, throwIfNoEntry: true }) { + path = getValidatedPath(path); + if (permission.isEnabled() && !permission.has("fs.read", path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path; + throw new ERR_ACCESS_DENIED( + "Access to this API has been restricted", + "FileSystemRead", + resource + ); + } + const stats = binding.lstat( + getValidatedPath(path), + options.bigint, + undefined, + options.throwIfNoEntry + ); + + if (stats === undefined) { + return; + } + return getStatsFromBinding(stats); +} + +/** + * Synchronously retrieves the `fs.Stats` + * for the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats} + */ +function statSync(path, options = { bigint: false, throwIfNoEntry: true }) { + const stats = binding.stat( + getValidatedPath(path), + options.bigint, + undefined, + options.throwIfNoEntry + ); + if (stats === undefined) { + return undefined; + } + return getStatsFromBinding(stats); +} + +function statfsSync(path, options = { bigint: false }) { + const stats = binding.statfs(getValidatedPath(path), options.bigint); + return getStatFsFromBinding(stats); +} + +/** + * Reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @param {( + * err?: Error, + * linkString?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readlink(path, options, callback) { + callback = makeCallback(typeof options === "function" ? 
options : callback); + options = getOptions(options); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.readlink(getValidatedPath(path), options.encoding, req); +} + +/** + * Synchronously reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @returns {string | Buffer} + */ +function readlinkSync(path, options) { + options = getOptions(options); + return binding.readlink(getValidatedPath(path), options.encoding); +} + +/** + * Creates the link called `path` pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function symlink(target, path, type, callback) { + if (callback === undefined) { + callback = makeCallback(type); + type = undefined; + } else { + validateOneOf(type, "type", ["dir", "file", "junction", null, undefined]); + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has("fs")) { + callback( + new ERR_ACCESS_DENIED( + "fs.symlink API requires full fs.read and fs.write permissions." + ) + ); + return; + } + + target = getValidatedPath(target, "target"); + path = getValidatedPath(path); + + if (isWindows && type == null) { + let absoluteTarget; + try { + // Symlinks targets can be relative to the newly created path. + // Calculate absolute file name of the symlink target, and check + // if it is a directory. Ignore resolve error to keep symlink + // errors consistent between platforms if invalid path is + // provided. + absoluteTarget = pathModule.resolve(path, "..", target); + } catch { + // Continue regardless of error. 
+ } + if (absoluteTarget !== undefined) { + stat(absoluteTarget, (err, stat) => { + const resolvedType = !err && stat.isDirectory() ? "dir" : "file"; + const resolvedFlags = stringToSymlinkType(resolvedType); + const destination = preprocessSymlinkDestination( + target, + resolvedType, + path + ); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.symlink(destination, path, resolvedFlags, req); + }); + return; + } + } + + const destination = preprocessSymlinkDestination(target, type, path); + + const flags = stringToSymlinkType(type); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.symlink(destination, path, flags, req); +} + +/** + * Synchronously creates the link called `path` + * pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @returns {void} + */ +function symlinkSync(target, path, type) { + validateOneOf(type, "type", ["dir", "file", "junction", null, undefined]); + if (isWindows && type == null) { + const absoluteTarget = pathModule.resolve(`${path}`, "..", `${target}`); + if (statSync(absoluteTarget, { throwIfNoEntry: false })?.isDirectory()) { + type = "dir"; + } + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has("fs")) { + throw new ERR_ACCESS_DENIED( + "fs.symlink API requires full fs.read and fs.write permissions." + ); + } + + target = getValidatedPath(target, "target"); + path = getValidatedPath(path); + + binding.symlink( + preprocessSymlinkDestination(target, type, path), + path, + stringToSymlinkType(type) + ); +} + +/** + * Creates a new link from the `existingPath` + * to the `newPath`. 
+ * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function link(existingPath, newPath, callback) { + callback = makeCallback(callback); + + existingPath = getValidatedPath(existingPath, "existingPath"); + newPath = getValidatedPath(newPath, "newPath"); + + const req = new FSReqCallback(); + req.oncomplete = callback; + + binding.link(existingPath, newPath, req); +} + +/** + * Synchronously creates a new link from the `existingPath` + * to the `newPath`. + * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function linkSync(existingPath, newPath) { + existingPath = getValidatedPath(existingPath, "existingPath"); + newPath = getValidatedPath(newPath, "newPath"); + + binding.link(existingPath, newPath); +} + +/** + * Asynchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function unlink(path, callback) { + callback = makeCallback(callback); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.unlink(getValidatedPath(path), req); +} + +/** + * Synchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @returns {void} + */ +function unlinkSync(path) { + binding.unlink(getValidatedPath(path)); +} + +/** + * Sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchmod(fd, mode, callback) { + mode = parseFileMode(mode, "mode"); + callback = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fchmod API is disabled when Permission Model is enabled." 
+ ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.fchmod(fd, mode, req); +} + +/** + * Synchronously sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @returns {void} + */ +function fchmodSync(fd, mode) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fchmod API is disabled when Permission Model is enabled." + ); + } + binding.fchmod(fd, parseFileMode(mode, "mode")); +} + +/** + * Changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchmod(path, mode, callback) { + validateFunction(callback, "cb"); + mode = parseFileMode(mode, "mode"); + fs.open(path, O_WRONLY | O_SYMLINK, (err, fd) => { + if (err) { + callback(err); + return; + } + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, (err) => { + fs.close(fd, (err2) => { + callback(aggregateTwoErrors(err2, err)); + }); + }); + }); +} + +/** + * Synchronously changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @returns {void} + */ +function lchmodSync(path, mode) { + const fd = fs.openSync(path, O_WRONLY | O_SYMLINK); + + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + try { + fs.fchmodSync(fd, mode); + } finally { + fs.closeSync(fd); + } +} + +/** + * Asynchronously changes the permissions of a file. 
+ * @param {string | Buffer | URL} path + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chmod(path, mode, callback) { + path = getValidatedPath(path); + mode = parseFileMode(mode, "mode"); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.chmod(path, mode, req); +} + +/** + * Synchronously changes the permissions of a file. + * @param {string | Buffer | URL} path + * @param {string | number} mode + * @returns {void} + */ +function chmodSync(path, mode) { + path = getValidatedPath(path); + mode = parseFileMode(mode, "mode"); + + binding.chmod(path, mode); +} + +/** + * Sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchown(path, uid, gid, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.lchown(path, uid, gid, req); +} + +/** + * Synchronously sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function lchownSync(path, uid, gid) { + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + binding.lchown(path, uid, gid); +} + +/** + * Sets the owner of the file. 
+ * @param {number} fd + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchown(fd, uid, gid, callback) { + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + callback = makeCallback(callback); + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fchown API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.fchown(fd, uid, gid, req); +} + +/** + * Synchronously sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function fchownSync(fd, uid, gid) { + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fchown API is disabled when Permission Model is enabled." + ); + } + + binding.fchown(fd, uid, gid); +} + +/** + * Asynchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chown(path, uid, gid, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.chown(path, uid, gid, req); +} + +/** + * Synchronously changes the owner and group + * of a file. 
+ * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function chownSync(path, uid, gid) { + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + binding.chown(path, uid, gid); +} + +/** + * Changes the file system timestamps of the object + * referenced by `path`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function utimes(path, atime, mtime, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.utimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req); +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by `path`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function utimesSync(path, atime, mtime) { + binding.utimes( + getValidatedPath(path), + toUnixTimestamp(atime), + toUnixTimestamp(mtime) + ); +} + +/** + * Changes the file system timestamps of the object + * referenced by the supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function futimes(fd, atime, mtime, callback) { + atime = toUnixTimestamp(atime, "atime"); + mtime = toUnixTimestamp(mtime, "mtime"); + callback = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "futimes API is disabled when Permission Model is enabled." 
+ ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.futimes(fd, atime, mtime, req); +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by the + * supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function futimesSync(fd, atime, mtime) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "futimes API is disabled when Permission Model is enabled." + ); + } + + binding.futimes( + fd, + toUnixTimestamp(atime, "atime"), + toUnixTimestamp(mtime, "mtime") + ); +} + +/** + * Changes the access and modification times of + * a file in the same way as `fs.utimes()`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lutimes(path, atime, mtime, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.lutimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req); +} + +/** + * Synchronously changes the access and modification + * times of a file in the same way as `fs.utimesSync()`. 
+ * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function lutimesSync(path, atime, mtime) { + binding.lutimes( + getValidatedPath(path), + toUnixTimestamp(atime), + toUnixTimestamp(mtime) + ); +} + +function writeAll( + fd, + isUserFd, + buffer, + offset, + length, + signal, + flush, + callback +) { + if (signal?.aborted) { + const abortError = new AbortError(undefined, { cause: signal.reason }); + if (isUserFd) { + callback(abortError); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, abortError)); + }); + } + return; + } + // write(fd, buffer, offset, length, position, callback) + fs.write(fd, buffer, offset, length, null, (writeErr, written) => { + if (writeErr) { + if (isUserFd) { + callback(writeErr); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, writeErr)); + }); + } + } else if (written === length) { + if (!flush) { + if (isUserFd) { + callback(null); + } else { + fs.close(fd, callback); + } + } else { + fs.fsync(fd, (syncErr) => { + if (syncErr) { + if (isUserFd) { + callback(syncErr); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, syncErr)); + }); + } + } else if (isUserFd) { + callback(null); + } else { + fs.close(fd, callback); + } + }); + } + } else { + offset += written; + length -= written; + writeAll(fd, isUserFd, buffer, offset, length, signal, flush, callback); + } + }); +} + +/** + * Asynchronously writes data to the file. 
+ * @param {string | Buffer | URL | number} path
+ * @param {string | Buffer | TypedArray | DataView} data
+ * @param {{
+ *   encoding?: string | null;
+ *   mode?: number;
+ *   flag?: string;
+ *   signal?: AbortSignal;
+ *   flush?: boolean;
+ *   } | string} [options]
+ * @param {(err?: Error) => any} callback
+ * @returns {void}
+ */
+function writeFile(path, data, options, callback) {
+  // Support the 3-argument form writeFile(path, data, callback).
+  callback ||= options;
+  validateFunction(callback, "cb");
+  options = getOptions(options, {
+    encoding: "utf8",
+    mode: 0o666,
+    flag: "w",
+    flush: false,
+  });
+  const flag = options.flag || "w";
+  const flush = options.flush ?? false;
+
+  validateBoolean(flush, "options.flush");
+
+  // Non-view data must be a string; convert it once up front so writeAll
+  // only ever deals with bytes.
+  if (!isArrayBufferView(data)) {
+    validateStringAfterArrayBufferView(data, "data");
+    data = Buffer.from(data, options.encoding || "utf8");
+  }
+
+  // A numeric path is an already-open descriptor owned by the caller:
+  // skip open() and never close it (isUserFd = true). Abort handling for
+  // this branch happens inside writeAll.
+  if (isFd(path)) {
+    const isUserFd = true;
+    const signal = options.signal;
+    writeAll(path, isUserFd, data, 0, data.byteLength, signal, flush, callback);
+    return;
+  }
+
+  // Avoid opening the file at all if the signal has already fired.
+  if (checkAborted(options.signal, callback)) return;
+
+  fs.open(path, flag, options.mode, (openErr, fd) => {
+    if (openErr) {
+      callback(openErr);
+    } else {
+      const isUserFd = false;
+      const signal = options.signal;
+      writeAll(fd, isUserFd, data, 0, data.byteLength, signal, flush, callback);
+    }
+  });
+}
+
+/**
+ * Synchronously writes data to the file.
+ * @param {string | Buffer | URL | number} path
+ * @param {string | Buffer | TypedArray | DataView} data
+ * @param {{
+ *   encoding?: string | null;
+ *   mode?: number;
+ *   flag?: string;
+ *   flush?: boolean;
+ *   } | string} [options]
+ * @returns {void}
+ */
+function writeFileSync(path, data, options) {
+  options = getOptions(options, {
+    encoding: "utf8",
+    mode: 0o666,
+    flag: "w",
+    flush: false,
+  });
+
+  const flush = options.flush ?? false;
+
+  validateBoolean(flush, "options.flush");
+
+  const flag = options.flag || "w";
+
+  // C++ fast path for string data and UTF8 encoding
+  // NOTE(review): this fast path returns before the flush handling below,
+  // so options.flush is validated but never acted on here — confirm that
+  // binding.writeFileUtf8 syncs internally, otherwise flush is silently
+  // ignored for UTF-8 string writes.
+  if (
+    typeof data === "string" &&
+    (options.encoding === "utf8" || options.encoding === "utf-8")
+  ) {
+    // An int32 path is a file descriptor and needs no path validation.
+    if (!isInt32(path)) {
+      path = getValidatedPath(path);
+    }
+
+    return binding.writeFileUtf8(
+      path,
+      data,
+      stringToFlags(flag),
+      parseFileMode(options.mode, "mode", 0o666)
+    );
+  }
+
+  if (!isArrayBufferView(data)) {
+    validateStringAfterArrayBufferView(data, "data");
+    data = Buffer.from(data, options.encoding || "utf8");
+  }
+
+  const isUserFd = isFd(path); // File descriptor ownership
+  const fd = isUserFd ? path : fs.openSync(path, flag, options.mode);
+
+  // Loop until all bytes are written; writeSync may write fewer than asked.
+  let offset = 0;
+  let length = data.byteLength;
+  try {
+    while (length > 0) {
+      const written = fs.writeSync(fd, data, offset, length);
+      offset += written;
+      length -= written;
+    }
+
+    if (flush) {
+      fs.fsyncSync(fd);
+    }
+  } finally {
+    // Only close descriptors we opened ourselves.
+    if (!isUserFd) fs.closeSync(fd);
+  }
+}
+
+/**
+ * Asynchronously appends data to a file.
+ * @param {string | Buffer | URL | number} path
+ * @param {string | Buffer} data
+ * @param {{
+ *   encoding?: string | null;
+ *   mode?: number;
+ *   flag?: string;
+ *   flush?: boolean;
+ *   } | string} [options]
+ * @param {(err?: Error) => any} callback
+ * @returns {void}
+ */
+function appendFile(path, data, options, callback) {
+  // Support the 3-argument form appendFile(path, data, callback).
+  callback ||= options;
+  validateFunction(callback, "cb");
+  options = getOptions(options, { encoding: "utf8", mode: 0o666, flag: "a" });
+
+  // Don't make changes directly on options object
+  options = copyObject(options);
+
+  // Force append behavior when using a supplied file descriptor
+  if (!options.flag || isFd(path)) options.flag = "a";
+
+  // Delegate to writeFile: with flag "a" it appends instead of truncating.
+  fs.writeFile(path, data, options, callback);
+}
+
+/**
+ * Synchronously appends data to a file.
+ * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * } | string} [options] + * @returns {void} + */ +function appendFileSync(path, data, options) { + options = getOptions(options, { encoding: "utf8", mode: 0o666, flag: "a" }); + + // Don't make changes directly on options object + options = copyObject(options); + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = "a"; + + fs.writeFileSync(path, data, options); +} + +/** + * Watches for the changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {string | { + * persistent?: boolean; + * recursive?: boolean; + * encoding?: string; + * signal?: AbortSignal; + * }} [options] + * @param {( + * eventType?: string, + * filename?: string | Buffer + * ) => any} [listener] + * @returns {watchers.FSWatcher} + */ +function watch(filename, options, listener) { + if (typeof options === "function") { + listener = options; + } + options = getOptions(options); + + // Don't make changes directly on options object + options = copyObject(options); + + if (options.persistent === undefined) options.persistent = true; + if (options.recursive === undefined) options.recursive = false; + + let watcher; + const watchers = require("internal/fs/watchers"); + const path = getValidatedPath(filename); + // TODO(anonrig): Remove non-native watcher when/if libuv supports recursive. + // As of November 2022, libuv does not support recursive file watch on all platforms, + // e.g. Linux due to the limitations of inotify. 
+ if (options.recursive && !isMacOS && !isWindows) { + const nonNativeWatcher = require("internal/fs/recursive_watch"); + watcher = new nonNativeWatcher.FSWatcher(options); + watcher[watchers.kFSWatchStart](path); + } else { + watcher = new watchers.FSWatcher(); + watcher[watchers.kFSWatchStart]( + path, + options.persistent, + options.recursive, + options.encoding + ); + } + + if (listener) { + watcher.addListener("change", listener); + } + if (options.signal) { + if (options.signal.aborted) { + process.nextTick(() => watcher.close()); + } else { + const listener = () => watcher.close(); + kResistStopPropagation ??= + require("internal/event_target").kResistStopPropagation; + options.signal.addEventListener("abort", listener, { + __proto__: null, + [kResistStopPropagation]: true, + }); + watcher.once("close", () => { + options.signal.removeEventListener("abort", listener); + }); + } + } + + return watcher; +} + +const statWatchers = new SafeMap(); + +/** + * Watches for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {{ + * bigint?: boolean; + * persistent?: boolean; + * interval?: number; + * }} [options] + * @param {( + * current?: Stats, + * previous?: Stats + * ) => any} listener + * @returns {watchers.StatWatcher} + */ +function watchFile(filename, options, listener) { + filename = getValidatedPath(filename); + filename = pathModule.resolve(filename); + let stat; + + if (options === null || typeof options !== "object") { + listener = options; + options = null; + } + + options = { + // Poll interval in milliseconds. 5007 is what libev used to use. It's + // a little on the slow side but let's stick with it for now to keep + // behavioral changes to a minimum. 
+ interval: 5007, + persistent: true, + ...options, + }; + + validateFunction(listener, "listener"); + + stat = statWatchers.get(filename); + const watchers = require("internal/fs/watchers"); + if (stat === undefined) { + stat = new watchers.StatWatcher(options.bigint); + stat[watchers.kFSStatWatcherStart]( + filename, + options.persistent, + options.interval + ); + statWatchers.set(filename, stat); + } else { + stat[watchers.kFSStatWatcherAddOrCleanRef]("add"); + } + + stat.addListener("change", listener); + return stat; +} + +/** + * Stops watching for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {() => any} [listener] + * @returns {void} + */ +function unwatchFile(filename, listener) { + filename = getValidatedPath(filename); + filename = pathModule.resolve(filename); + const stat = statWatchers.get(filename); + + if (stat === undefined) return; + const watchers = require("internal/fs/watchers"); + if (typeof listener === "function") { + const beforeListenerCount = stat.listenerCount("change"); + stat.removeListener("change", listener); + if (stat.listenerCount("change") < beforeListenerCount) + stat[watchers.kFSStatWatcherAddOrCleanRef]("clean"); + } else { + stat.removeAllListeners("change"); + stat[watchers.kFSStatWatcherAddOrCleanRef]("cleanAll"); + } + + if (stat.listenerCount("change") === 0) { + stat.stop(); + statWatchers.delete(filename); + } +} + +let splitRoot; +if (isWindows) { + // Regex to find the device root on Windows (e.g. 'c:\\'), including trailing + // slash. 
+  // Drive roots ('c:\'), UNC roots ('\\server\share\') and any run of
+  // leading separators, including the trailing slash.
+  const splitRootRe = /^(?:[a-zA-Z]:|[\\/]{2}[^\\/]+[\\/][^\\/]+)?[\\/]*/;
+  splitRoot = function splitRoot(str) {
+    return SideEffectFreeRegExpPrototypeExec(splitRootRe, str)[0];
+  };
+} else {
+  // POSIX: the root is the (possibly empty) run of leading '/' characters.
+  splitRoot = function splitRoot(str) {
+    for (let i = 0; i < str.length; ++i) {
+      if (StringPrototypeCharCodeAt(str, i) !== CHAR_FORWARD_SLASH)
+        return StringPrototypeSlice(str, 0, i);
+    }
+    return str;
+  };
+}
+
+// Converts a realpath result (always computed as a string) to the encoding
+// requested in options: unchanged for the default/utf8, a Buffer for
+// encoding "buffer", otherwise re-encoded through Buffer.
+function encodeRealpathResult(result, options) {
+  if (!options || !options.encoding || options.encoding === "utf8")
+    return result;
+  const asBuffer = Buffer.from(result);
+  if (options.encoding === "buffer") {
+    return asBuffer;
+  }
+  return asBuffer.toString(options.encoding);
+}
+
+// Finds the next portion of a (partial) path, up to the next path delimiter.
+// Returns the index of the delimiter, or -1 if none remains.
+let nextPart;
+if (isWindows) {
+  // Windows accepts both separator characters.
+  nextPart = function nextPart(p, i) {
+    for (; i < p.length; ++i) {
+      const ch = StringPrototypeCharCodeAt(p, i);
+
+      // Check for a separator character
+      if (ch === CHAR_BACKWARD_SLASH || ch === CHAR_FORWARD_SLASH) return i;
+    }
+    return -1;
+  };
+} else {
+  nextPart = function nextPart(p, i) {
+    return StringPrototypeIndexOf(p, "/", i);
+  };
+}
+
+/**
+ * Returns the resolved pathname.
+ * @param {string | Buffer | URL} p + * @param {string | { encoding?: string | null; }} [options] + * @returns {string | Buffer} + */ +function realpathSync(p, options) { + options = getOptions(options); + p = toPathIfFileURL(p); + if (typeof p !== "string") { + p += ""; + } + validatePath(p); + p = pathModule.resolve(p); + + const cache = options[realpathCacheKey]; + const maybeCachedResult = cache?.get(p); + if (maybeCachedResult) { + return maybeCachedResult; + } + + const seenLinks = new SafeMap(); + const knownHard = new SafeSet(); + const original = p; + + // Current character position in p + let pos; + // The partial path so far, including a trailing slash if any + let current; + // The partial path without a trailing slash (except when pointing at a root) + let base; + // The partial path scanned in the previous round, with slash + let previous; + + // Skip over roots + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows) { + const out = binding.lstat( + base, + false, + undefined, + true /* throwIfNoEntry */ + ); + if (out === undefined) { + return; + } + knownHard.add(base); + } + + // Walk down the path, swapping out linked path parts for their real + // values + // NB: p.length changes. 
+ while (pos < p.length) { + // find the next part + const result = nextPart(p, pos); + previous = current; + if (result === -1) { + const last = StringPrototypeSlice(p, pos); + current += last; + base = previous + last; + pos = p.length; + } else { + current += StringPrototypeSlice(p, pos, result + 1); + base = previous + StringPrototypeSlice(p, pos, result); + pos = result + 1; + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base) || cache?.get(base) === base) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + break; + } + continue; + } + + let resolvedLink; + const maybeCachedResolved = cache?.get(base); + if (maybeCachedResolved) { + resolvedLink = maybeCachedResolved; + } else { + // Use stats array directly to avoid creating an fs.Stats instance just + // for our internal use. + + const stats = binding.lstat( + base, + true, + undefined, + true /* throwIfNoEntry */ + ); + if (stats === undefined) { + return; + } + + if (!isFileType(stats, S_IFLNK)) { + knownHard.add(base); + cache?.set(base, base); + continue; + } + + // Read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + let linkTarget = null; + let id; + if (!isWindows) { + const dev = BigIntPrototypeToString(stats[0], 32); + const ino = BigIntPrototypeToString(stats[7], 32); + id = `${dev}:${ino}`; + if (seenLinks.has(id)) { + linkTarget = seenLinks.get(id); + } + } + if (linkTarget === null) { + binding.stat(base, false, undefined, true); + linkTarget = binding.readlink(base, undefined); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + + cache?.set(base, resolvedLink); + if (!isWindows) seenLinks.set(id, linkTarget); + } + + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)); + + // Skip over roots + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. 
+ if (isWindows && !knownHard.has(base)) { + const out = binding.lstat( + base, + false, + undefined, + true /* throwIfNoEntry */ + ); + if (out === undefined) { + return; + } + knownHard.add(base); + } + } + + cache?.set(original, p); + return encodeRealpathResult(p, options); +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @returns {string | Buffer} + */ +realpathSync.native = (path, options) => { + options = getOptions(options); + return binding.realpath(getValidatedPath(path), options.encoding); +}; + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function realpath(p, options, callback) { + if (typeof options === "function") { + callback = options; + } else { + validateFunction(callback, "cb"); + } + options = getOptions(options); + p = toPathIfFileURL(p); + + if (typeof p !== "string") { + p += ""; + } + validatePath(p); + p = pathModule.resolve(p); + + const seenLinks = new SafeMap(); + const knownHard = new SafeSet(); + + // Current character position in p + let pos; + // The partial path so far, including a trailing slash if any + let current; + // The partial path without a trailing slash (except when pointing at a root) + let base; + // The partial path scanned in the previous round, with slash + let previous; + + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. 
+ if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err); + knownHard.add(base); + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + + // Walk down the path, swapping out linked path parts for their real + // values + function LOOP() { + // Stop if scanned past end of path + if (pos >= p.length) { + return callback(null, encodeRealpathResult(p, options)); + } + + // find the next part + const result = nextPart(p, pos); + previous = current; + if (result === -1) { + const last = StringPrototypeSlice(p, pos); + current += last; + base = previous + last; + pos = p.length; + } else { + current += StringPrototypeSlice(p, pos, result + 1); + base = previous + StringPrototypeSlice(p, pos, result); + pos = result + 1; + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base)) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + return callback(null, encodeRealpathResult(p, options)); + } + return process.nextTick(LOOP); + } + + return fs.lstat(base, { bigint: true }, gotStat); + } + + function gotStat(err, stats) { + if (err) return callback(err); + + // If not a symlink, skip to the next path part + if (!stats.isSymbolicLink()) { + knownHard.add(base); + return process.nextTick(LOOP); + } + + // Stat & read the link if not read before. + // Call `gotTarget()` as soon as the link target is known. + // `dev`/`ino` always return 0 on windows, so skip the check. 
+ let id; + if (!isWindows) { + const dev = BigIntPrototypeToString(stats.dev, 32); + const ino = BigIntPrototypeToString(stats.ino, 32); + id = `${dev}:${ino}`; + if (seenLinks.has(id)) { + return gotTarget(null, seenLinks.get(id)); + } + } + fs.stat(base, (err) => { + if (err) return callback(err); + + fs.readlink(base, (err, target) => { + if (!isWindows) seenLinks.set(id, target); + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target) { + if (err) return callback(err); + + gotResolvedLink(pathModule.resolve(previous, target)); + } + + function gotResolvedLink(resolvedLink) { + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)); + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err); + knownHard.add(base); + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } +} + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +realpath.native = (path, options, callback) => { + callback = makeCallback(callback || options); + options = getOptions(options); + path = getValidatedPath(path); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.realpath(path, options.encoding, req); +}; + +/** + * Creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * directory?: string + * ) => any} callback + * @returns {void} + */ +function mkdtemp(prefix, options, callback) { + callback = makeCallback(typeof options === "function" ? 
options : callback); + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.mkdtemp(prefix, options.encoding, req); +} + +/** + * Synchronously creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {string} + */ +function mkdtempSync(prefix, options) { + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + return binding.mkdtemp(prefix, options.encoding); +} + +/** + * Synchronously creates a unique temporary directory. + * The returned value is a disposable object which removes the + * directory and its contents when disposed. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {object} A disposable object with a "path" property. + */ +function mkdtempDisposableSync(prefix, options) { + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + + const path = binding.mkdtemp(prefix, options.encoding); + // Stash the full path in case of process.chdir() + const fullPath = pathModule.resolve(process.cwd(), path); + + const remove = () => { + binding.rmSync( + fullPath, + 0 /* maxRetries */, + true /* recursive */, + 100 /* retryDelay */ + ); + }; + return { + path, + remove, + [SymbolDispose]() { + remove(); + }, + }; +} + +/** + * Asynchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. 
+ * @param {string | Buffer | URL} src
+ * @param {string | Buffer | URL} dest
+ * @param {number} [mode] Optional modifiers for the copy (a bitmask of
+ *   `fs.constants.COPYFILE_*` flags, e.g. COPYFILE_EXCL). Defaults to 0.
+ * @param {(err?: Error) => any} callback
+ * @returns {void}
+ */
+function copyFile(src, dest, mode, callback) {
+  // Support the 3-argument form copyFile(src, dest, callback).
+  if (typeof mode === "function") {
+    callback = mode;
+    mode = 0;
+  }
+
+  src = getValidatedPath(src, "src");
+  dest = getValidatedPath(dest, "dest");
+  callback = makeCallback(callback);
+
+  const req = new FSReqCallback();
+  req.oncomplete = callback;
+  binding.copyFile(src, dest, mode, req);
+}
+
+/**
+ * Synchronously copies `src` to `dest`. By
+ * default, `dest` is overwritten if it already exists.
+ * @param {string | Buffer | URL} src
+ * @param {string | Buffer | URL} dest
+ * @param {number} [mode]
+ * @returns {void}
+ */
+function copyFileSync(src, dest, mode) {
+  // NOTE(review): `mode` is forwarded to the binding unvalidated and with no
+  // explicit default, unlike the async copyFile above — presumably the C++
+  // layer treats undefined as 0; confirm.
+  binding.copyFile(
+    getValidatedPath(src, "src"),
+    getValidatedPath(dest, "dest"),
+    mode
+  );
+}
+
+/**
+ * Asynchronously copies `src` to `dest`. `src` can be a file, directory, or
+ * symlink. The contents of directories will be copied recursively.
+ * @param {string | URL} src
+ * @param {string | URL} dest
+ * @param {object} [options]
+ * @param {(err?: Error) => any} callback
+ * @returns {void}
+ */
+function cp(src, dest, options, callback) {
+  // Support the 3-argument form cp(src, dest, callback).
+  if (typeof options === "function") {
+    callback = options;
+    options = undefined;
+  }
+  callback = makeCallback(callback);
+  options = validateCpOptions(options);
+  src = getValidatedPath(src, "src");
+  dest = getValidatedPath(dest, "dest");
+  // The recursive-copy implementation is loaded on first use.
+  lazyLoadCp();
+  cpFn(src, dest, options, callback);
+}
+
+/**
+ * Synchronously copies `src` to `dest`. `src` can be a file, directory, or
+ * symlink. The contents of directories will be copied recursively.
+ * @param {string | URL} src
+ * @param {string | URL} dest
+ * @param {object} [options]
+ * @returns {void}
+ */
+function cpSync(src, dest, options) {
+  options = validateCpOptions(options);
+  src = getValidatedPath(src, "src");
+  dest = getValidatedPath(dest, "dest");
+  lazyLoadCp();
+  cpSyncFn(src, dest, options);
+}
+
+// Loads the stream classes on first use and populates both the modern
+// (ReadStream/WriteStream) and legacy (FileReadStream/FileWriteStream)
+// module-level bindings.
+function lazyLoadStreams() {
+  if (!ReadStream) {
+    ({ ReadStream, WriteStream } = require("internal/fs/streams"));
+    FileReadStream = ReadStream;
+    FileWriteStream = WriteStream;
+  }
+}
+
+/**
+ * Creates a readable stream with a default `highWaterMark`
+ * of 64 KiB.
+ * @param {string | Buffer | URL} path
+ * @param {string | {
+ *   flags?: string;
+ *   encoding?: string;
+ *   fd?: number | FileHandle;
+ *   mode?: number;
+ *   autoClose?: boolean;
+ *   emitClose?: boolean;
+ *   start: number;
+ *   end?: number;
+ *   highWaterMark?: number;
+ *   fs?: object | null;
+ *   signal?: AbortSignal | null;
+ *   }} [options]
+ * @returns {ReadStream}
+ */
+function createReadStream(path, options) {
+  lazyLoadStreams();
+  return new ReadStream(path, options);
+}
+
+/**
+ * Creates a write stream.
+ * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * highWaterMark?: number; + * flush?: boolean; + * }} [options] + * @returns {WriteStream} + */ +function createWriteStream(path, options) { + lazyLoadStreams(); + return new WriteStream(path, options); +} + +const lazyGlob = getLazy(() => require("internal/fs/glob").Glob); + +function glob(pattern, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + callback = makeCallback(callback); + + const Glob = lazyGlob(); + PromisePrototypeThen( + ArrayFromAsync(new Glob(pattern, options).glob()), + (res) => callback(null, res), + callback + ); +} + +function globSync(pattern, options) { + const Glob = lazyGlob(); + return new Glob(pattern, options).globSync(); +} + +module.exports = fs = { + appendFile, + appendFileSync, + access, + accessSync, + chown, + chownSync, + chmod, + chmodSync, + close, + closeSync, + copyFile, + copyFileSync, + cp, + cpSync, + createReadStream, + createWriteStream, + exists, + existsSync, + fchown, + fchownSync, + fchmod, + fchmodSync, + fdatasync, + fdatasyncSync, + fstat, + fstatSync, + fsync, + fsyncSync, + ftruncate, + ftruncateSync, + futimes, + futimesSync, + glob, + globSync, + lchown, + lchownSync, + lchmod: constants.O_SYMLINK !== undefined ? lchmod : undefined, + lchmodSync: constants.O_SYMLINK !== undefined ? 
lchmodSync : undefined, + link, + linkSync, + lstat, + lstatSync, + lutimes, + lutimesSync, + mkdir, + mkdirSync, + mkdtemp, + mkdtempSync, + mkdtempDisposableSync, + open, + openSync, + openAsBlob, + readdir, + readdirSync, + read, + readSync, + readv, + readvSync, + readFile, + readFileSync, + readlink, + readlinkSync, + realpath, + realpathSync, + rename, + renameSync, + rm, + rmSync, + rmdir, + rmdirSync, + stat, + statfs, + statSync, + statfsSync, + symlink, + symlinkSync, + truncate, + truncateSync, + unwatchFile, + unlink, + unlinkSync, + utimes, + utimesSync, + watch, + watchFile, + writeFile, + writeFileSync, + write, + writeSync, + writev, + writevSync, + Dirent, + Stats, + + get ReadStream() { + lazyLoadStreams(); + return ReadStream; + }, + + set ReadStream(val) { + ReadStream = val; + }, + + get WriteStream() { + lazyLoadStreams(); + return WriteStream; + }, + + set WriteStream(val) { + WriteStream = val; + }, + + // Legacy names... these have to be separate because of how graceful-fs + // (and possibly other) modules monkey patch the values. 
+ get FileReadStream() { + lazyLoadStreams(); + return FileReadStream; + }, + + set FileReadStream(val) { + FileReadStream = val; + }, + + get FileWriteStream() { + lazyLoadStreams(); + return FileWriteStream; + }, + + set FileWriteStream(val) { + FileWriteStream = val; + }, + + get Utf8Stream() { + lazyLoadUtf8Stream(); + return Utf8Stream; + }, + + // For tests + _toUnixTimestamp: toUnixTimestamp, +}; + +defineLazyProperties(fs, "internal/fs/dir", ["Dir", "opendir", "opendirSync"]); + +ObjectDefineProperties(fs, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + promises ??= require("internal/fs/promises").exports; + return promises; + }, + }, +}); diff --git a/node/hello-world.js b/node/hello-world.js new file mode 100644 index 00000000..85722910 --- /dev/null +++ b/node/hello-world.js @@ -0,0 +1,14 @@ +const http = require('node:http'); + +const hostname = '127.0.0.1'; +const port = 3000; + +const server = http.createServer((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end('Hello, World!\n'); +}); + +server.listen(port, hostname, () => { + console.log(`Server running at http://${hostname}:${port}/`); +}); \ No newline at end of file diff --git a/node/http.js b/node/http.js new file mode 100644 index 00000000..f068a887 --- /dev/null +++ b/node/http.js @@ -0,0 +1,235 @@ +"use strict"; + +const { ObjectDefineProperty } = primordials; + +const { validateInteger, validateObject } = require("internal/validators"); +const httpAgent = require("_http_agent"); +const { ClientRequest } = require("_http_client"); +const { methods, parsers } = require("_http_common"); +const { IncomingMessage } = require("_http_incoming"); +const { ERR_PROXY_INVALID_CONFIG } = require("internal/errors").codes; +const { + validateHeaderName, + validateHeaderValue, + OutgoingMessage, +} = 
require("_http_outgoing");
+const {
+  _connectionListener,
+  STATUS_CODES,
+  Server,
+  ServerResponse,
+} = require("_http_server");
+const { parseProxyUrl, getGlobalAgent } = require("internal/http");
+const { URL } = require("internal/url");
+// Both are populated lazily: maxHeaderSize on first property read,
+// undici on first WebSocket/proxy use.
+let maxHeaderSize;
+let undici;
+
+/**
+ * Returns a new instance of `http.Server`.
+ * @param {{
+ *   IncomingMessage?: IncomingMessage;
+ *   ServerResponse?: ServerResponse;
+ *   insecureHTTPParser?: boolean;
+ *   maxHeaderSize?: number;
+ *   requireHostHeader?: boolean;
+ *   joinDuplicateHeaders?: boolean;
+ *   highWaterMark?: number;
+ *   rejectNonStandardBodyWrites?: boolean;
+ * }} [opts]
+ * @param {Function} [requestListener]
+ * @returns {Server}
+ */
+function createServer(opts, requestListener) {
+  return new Server(opts, requestListener);
+}
+
+/**
+ * @typedef {object} HTTPRequestOptions
+ * @property {httpAgent.Agent | boolean} [agent] Controls Agent behavior.
+ * @property {string} [auth] Basic authentication ('user:password') to compute an Authorization header.
+ * @property {Function} [createConnection] Produces a socket/stream to use when the agent option is not used.
+ * @property {number} [defaultPort] Default port for the protocol.
+ * @property {number} [family] IP address family to use when resolving host or hostname.
+ * @property {object} [headers] An object containing request headers.
+ * @property {number} [hints] Optional dns.lookup() hints.
+ * @property {string} [host] A domain name or IP address of the server to issue the request to.
+ * @property {string} [hostname] Alias for host.
+ * @property {boolean} [insecureHTTPParser] Use an insecure HTTP parser that accepts invalid HTTP headers when true.
+ * @property {boolean} [joinDuplicateHeaders] Whether to join the field line values of multiple headers with the same name using `,` instead of discarding the duplicates.
+ * @property {string} [localAddress] Local interface to bind for network connections.
+ * @property {number} [localPort] Local port to connect from.
+ * @property {Function} [lookup] Custom lookup function. Default: dns.lookup(). + * @property {number} [maxHeaderSize] Overrides the --max-http-header-size value for responses received from the server. + * @property {string} [method] A string specifying the HTTP request method. + * @property {string} [path] Request path. + * @property {number} [port] Port of remote server. + * @property {string} [protocol] Protocol to use. + * @property {boolean} [setHost] Specifies whether or not to automatically add the Host header. + * @property {AbortSignal} [signal] An AbortSignal that may be used to abort an ongoing request. + * @property {string} [socketPath] Unix domain socket. + * @property {number} [timeout] A number specifying the socket timeout in milliseconds. + * @property {Array} [uniqueHeaders] A list of request headers that should be sent only once. + */ + +/** + * Makes an HTTP request. + * @param {string | URL} url + * @param {HTTPRequestOptions} [options] + * @param {Function} [cb] + * @returns {ClientRequest} + */ +function request(url, options, cb) { + return new ClientRequest(url, options, cb); +} + +/** + * Makes a `GET` HTTP request. + * @param {string | URL} url + * @param {HTTPRequestOptions} [options] + * @param {Function} [cb] + * @returns {ClientRequest} + */ +function get(url, options, cb) { + const req = request(url, options, cb); + req.end(); + return req; +} + +/** + * Lazy loads WebSocket, CloseEvent and MessageEvent classes from undici + * @returns {object} An object containing WebSocket, CloseEvent, and MessageEvent classes. 
+ */ +function lazyUndici() { + return (undici ??= require("internal/deps/undici/undici")); +} + +function setGlobalProxyFromEnv(env = process.env) { + validateObject(env, "proxyEnv"); + const httpProxy = parseProxyUrl(env, "http:"); + const httpsProxy = parseProxyUrl(env, "https:"); + const noProxy = env.no_proxy || env.NO_PROXY; + + if (!httpProxy && !httpsProxy) { + return () => {}; + } + + if (httpProxy && !URL.canParse(httpProxy)) { + throw new ERR_PROXY_INVALID_CONFIG(httpProxy); + } + if (httpsProxy && !URL.canParse(httpsProxy)) { + throw new ERR_PROXY_INVALID_CONFIG(httpsProxy); + } + + let originalDispatcher, originalHttpsAgent, originalHttpAgent; + if (httpProxy || httpsProxy) { + // Set it for fetch. + const { setGlobalDispatcher, getGlobalDispatcher, EnvHttpProxyAgent } = + lazyUndici(); + const envHttpProxyAgent = new EnvHttpProxyAgent({ + __proto__: null, + httpProxy, + httpsProxy, + noProxy, + }); + originalDispatcher = getGlobalDispatcher(); + setGlobalDispatcher(envHttpProxyAgent); + } + + if (httpProxy) { + originalHttpAgent = module.exports.globalAgent; + module.exports.globalAgent = getGlobalAgent(env, httpAgent.Agent); + } + if (httpsProxy && !!process.versions.openssl) { + const https = require("https"); + originalHttpsAgent = https.globalAgent; + https.globalAgent = getGlobalAgent(env, https.Agent); + } + + return function restore() { + if (originalDispatcher) { + const { setGlobalDispatcher } = lazyUndici(); + setGlobalDispatcher(originalDispatcher); + } + if (originalHttpAgent) { + module.exports.globalAgent = originalHttpAgent; + } + if (originalHttpsAgent) { + require("https").globalAgent = originalHttpsAgent; + } + }; +} + +module.exports = { + _connectionListener, + METHODS: methods.toSorted(), + STATUS_CODES, + Agent: httpAgent.Agent, + ClientRequest, + IncomingMessage, + OutgoingMessage, + Server, + ServerResponse, + createServer, + validateHeaderName, + validateHeaderValue, + get, + request, + setMaxIdleHTTPParsers(max) { + 
validateInteger(max, "max", 1); + parsers.max = max; + }, + setGlobalProxyFromEnv, +}; + +ObjectDefineProperty(module.exports, "maxHeaderSize", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (maxHeaderSize === undefined) { + const { getOptionValue } = require("internal/options"); + maxHeaderSize = getOptionValue("--max-http-header-size"); + } + + return maxHeaderSize; + }, +}); + +ObjectDefineProperty(module.exports, "globalAgent", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return httpAgent.globalAgent; + }, + set(value) { + httpAgent.globalAgent = value; + }, +}); + +ObjectDefineProperty(module.exports, "WebSocket", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return lazyUndici().WebSocket; + }, +}); + +ObjectDefineProperty(module.exports, "CloseEvent", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return lazyUndici().CloseEvent; + }, +}); + +ObjectDefineProperty(module.exports, "MessageEvent", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return lazyUndici().MessageEvent; + }, +}); diff --git a/node/http2.js b/node/http2.js new file mode 100644 index 00000000..1dabb8be --- /dev/null +++ b/node/http2.js @@ -0,0 +1,29 @@ +"use strict"; + +const { + connect, + constants, + createServer, + createSecureServer, + getDefaultSettings, + getPackedSettings, + getUnpackedSettings, + performServerHandshake, + sensitiveHeaders, + Http2ServerRequest, + Http2ServerResponse, +} = require("internal/http2/core"); + +module.exports = { + connect, + constants, + createServer, + createSecureServer, + getDefaultSettings, + getPackedSettings, + getUnpackedSettings, + performServerHandshake, + sensitiveHeaders, + Http2ServerRequest, + Http2ServerResponse, +}; diff --git a/node/https.js b/node/https.js new file mode 100644 index 00000000..05fb02fe --- /dev/null +++ b/node/https.js @@ -0,0 +1,660 @@ +'use strict'; + +const { + 
ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + FunctionPrototypeCall, + JSONStringify, + NumberParseInt, + ObjectAssign, + ObjectSetPrototypeOf, + ReflectApply, + ReflectConstruct, + SymbolAsyncDispose, +} = primordials; + +const { + assertCrypto, + kEmptyObject, + promisify, + once, +} = require('internal/util'); +const { ERR_PROXY_TUNNEL } = require('internal/errors').codes; +assertCrypto(); + +const tls = require('tls'); +const { + kProxyConfig, + checkShouldUseProxy, + filterEnvForProxies, + kWaitForProxyTunnel, +} = require('internal/http'); +const { Agent: HttpAgent } = require('_http_agent'); +const { + httpServerPreClose, + Server: HttpServer, + setupConnectionsTracking, + storeHTTPOptions, + _connectionListener, +} = require('_http_server'); +const { ClientRequest } = require('_http_client'); +let debug = require('internal/util/debuglog').debuglog('https', (fn) => { + debug = fn; +}); +const net = require('net'); +const { URL, urlToHttpOptions, isURL } = require('internal/url'); +const { validateObject } = require('internal/validators'); +const { isIP } = require('internal/net'); +const assert = require('internal/assert'); +const { getOptionValue } = require('internal/options'); + +function Server(opts, requestListener) { + if (!(this instanceof Server)) return new Server(opts, requestListener); + + let ALPNProtocols = ['http/1.1']; + if (typeof opts === 'function') { + requestListener = opts; + opts = kEmptyObject; + } else if (opts == null) { + opts = kEmptyObject; + } else { + validateObject(opts, 'options'); + // Only one of ALPNProtocols and ALPNCallback can be set, so make sure we + // only set a default ALPNProtocols if the caller has not set either of them + if (opts.ALPNProtocols || opts.ALPNCallback) + ALPNProtocols = undefined; + } + + FunctionPrototypeCall(storeHTTPOptions, this, opts); + FunctionPrototypeCall(tls.Server, this, + { + noDelay: true, + ALPNProtocols, + 
...opts, + }, + _connectionListener); + + this.httpAllowHalfOpen = false; + + if (requestListener) { + this.addListener('request', requestListener); + } + + this.addListener('tlsClientError', function addListener(err, conn) { + if (!this.emit('clientError', err, conn)) + conn.destroy(err); + }); + + this.timeout = 0; + this.maxHeadersCount = null; + this.on('listening', setupConnectionsTracking); +} + +ObjectSetPrototypeOf(Server.prototype, tls.Server.prototype); +ObjectSetPrototypeOf(Server, tls.Server); + +Server.prototype.closeAllConnections = HttpServer.prototype.closeAllConnections; + +Server.prototype.closeIdleConnections = HttpServer.prototype.closeIdleConnections; + +Server.prototype.setTimeout = HttpServer.prototype.setTimeout; + +Server.prototype.close = function close() { + httpServerPreClose(this); + ReflectApply(tls.Server.prototype.close, this, arguments); + return this; +}; + +Server.prototype[SymbolAsyncDispose] = async function() { + await FunctionPrototypeCall(promisify(this.close), this); +}; + +/** + * Creates a new `https.Server` instance. + * @param {{ + * IncomingMessage?: IncomingMessage; + * ServerResponse?: ServerResponse; + * insecureHTTPParser?: boolean; + * maxHeaderSize?: number; + * }} [opts] + * @param {Function} [requestListener] + * @returns {Server} + */ +function createServer(opts, requestListener) { + return new Server(opts, requestListener); +} + +// When proxying a HTTPS request, the following needs to be done: +// https://datatracker.ietf.org/doc/html/rfc9110#CONNECT +// 1. Send a CONNECT request to the proxy server. +// 2. Wait for 200 connection established response to establish the tunnel. +// 3. Perform TLS handshake with the endpoint over the socket. +// 4. Tunnel the request using the established connection. +// +// This function computes the tunnel configuration for HTTPS requests. +// The handling of the tunnel connection is done in createConnection. 
/**
 * Computes the CONNECT-tunnel configuration for an HTTPS request going
 * through the agent's configured proxy.
 * @param {Agent} agent Agent that may carry a proxy config under kProxyConfig.
 * @param {object} reqOptions Request options already normalized by urlToHttpOptions.
 * @returns {object | null} `{ proxyTunnelPayload, requestOptions }`, or null
 *   when no proxy is configured, the request is not HTTPS, or the proxy
 *   rules exclude this host.
 */
function getTunnelConfigForProxiedHttps(agent, reqOptions) {
  if (!agent[kProxyConfig]) {
    return null;
  }
  if ((reqOptions.protocol || agent.protocol) !== 'https:') {
    return null;
  }
  const shouldUseProxy = checkShouldUseProxy(agent[kProxyConfig], reqOptions);
  debug(`getTunnelConfigForProxiedHttps should use proxy for ${reqOptions.host}:${reqOptions.port}:`, shouldUseProxy);
  if (!shouldUseProxy) {
    return null;
  }
  const { auth, href } = agent[kProxyConfig];
  // The request is a HTTPS request, assemble the payload for establishing the tunnel.
  const ipType = isIP(reqOptions.host);
  // The request target must put IPv6 address in square brackets.
  // Here reqOptions is already processed by urlToHttpOptions so we'll add them back if necessary.
  // See https://www.rfc-editor.org/rfc/rfc3986#section-3.2.2
  const requestHost = ipType === 6 ? `[${reqOptions.host}]` : reqOptions.host;
  const requestPort = reqOptions.port || agent.defaultPort;
  const endpoint = `${requestHost}:${requestPort}`;
  // The ClientRequest constructor should already have validated the host and the port.
  // When the request options come from a string invalid characters would be stripped away,
  // when it's an object ERR_INVALID_CHAR would be thrown. Here we just assert in case
  // agent.createConnection() is called with invalid options.
  assert(!endpoint.includes('\r'));
  assert(!endpoint.includes('\n'));

  let payload = `CONNECT ${endpoint} HTTP/1.1\r\n`;
  // The parseProxyConfigFromEnv() method should have already validated the authorization header
  // value.
  if (auth) {
    payload += `proxy-authorization: ${auth}\r\n`;
  }
  if (agent.keepAlive || agent.maxSockets !== Infinity) {
    payload += 'proxy-connection: keep-alive\r\n';
  }
  payload += `host: ${endpoint}`;
  payload += '\r\n\r\n';

  const result = {
    __proto__: null,
    proxyTunnelPayload: payload,
    requestOptions: { // Options used for the request sent after the tunnel is established.
      __proto__: null,
      // Parenthesized for clarity: when reqOptions carries no servername, SNI
      // defaults to the host name unless the host is an IP literal. An explicit
      // reqOptions.servername wins via the spread below.
      servername: reqOptions.servername || (ipType ? undefined : reqOptions.host),
      ...reqOptions,
    },
  };
  debug(`updated request for HTTPS proxy ${href} with`, result);
  return result;
}

/**
 * Establishes a CONNECT tunnel over `socket`, then performs the TLS handshake
 * with the origin over the tunneled connection.
 * @param {Agent} agent Agent holding the proxy config (used for error messages).
 * @param {net.Socket | tls.TLSSocket} socket Raw connection to the proxy.
 * @param {object} options Original request options (used for timeout debug output).
 * @param {object} tunnelConfig Result of getTunnelConfigForProxiedHttps().
 * @param {Function} afterSocket `(err, socket)` callback invoked exactly once
 *   with either an error or the TLS socket ready for the request.
 */
function establishTunnel(agent, socket, options, tunnelConfig, afterSocket) {
  const { proxyTunnelPayload } = tunnelConfig;
  // By default, the socket is in paused mode. Read to look for the 200
  // connection established response.
  function read() {
    let chunk;
    while ((chunk = socket.read()) !== null) {
      if (onProxyData(chunk) !== -1) {
        break;
      }
    }
    // Re-arm with once() — using on() here would add a new listener on every
    // 'readable' event, accumulating duplicates that cleanup() (which removes
    // a single listener instance) could not fully detach.
    socket.once('readable', read);
  }

  function cleanup() {
    socket.removeListener('end', onProxyEnd);
    socket.removeListener('error', onProxyError);
    socket.removeListener('readable', read);
    socket.setTimeout(0); // Clear the timeout for the tunnel establishment.
  }

  function onProxyError(err) {
    debug('onProxyError', err);
    cleanup();
    afterSocket(err, socket);
  }

  // Read the headers from the chunks and check for the status code. If it fails we
  // clean up the socket and return an error. Otherwise we establish the tunnel.
  let buffer = '';
  function onProxyData(chunk) {
    const str = chunk.toString();
    debug('onProxyData', str);
    buffer += str;
    const headerEndIndex = buffer.indexOf('\r\n\r\n');
    if (headerEndIndex === -1) return headerEndIndex;
    const statusLine = buffer.substring(0, buffer.indexOf('\r\n'));
    const statusCode = statusLine.split(' ')[1];
    if (statusCode !== '200') {
      debug(`onProxyData receives ${statusCode}, cleaning up`);
      cleanup();
      const targetHost = proxyTunnelPayload.split('\r')[0].split(' ')[1];
      const message = `Failed to establish tunnel to ${targetHost} via ${agent[kProxyConfig].href}: ${statusLine}`;
      const err = new ERR_PROXY_TUNNEL(message);
      err.statusCode = NumberParseInt(statusCode, 10);
      afterSocket(err, socket);
    } else {
      // https://datatracker.ietf.org/doc/html/rfc9110#CONNECT
      // RFC 9110 says that it can be 2xx but in the real world, proxy clients generally only
      // accept 200.
      // Proxy servers are not supposed to send anything after the headers - the payload must
      // be empty. So after this point we will proceed with the tunnel e.g. starting TLS handshake.
      debug('onProxyData receives 200, establishing tunnel');
      cleanup();

      // Reuse the tunneled socket to perform the TLS handshake with the endpoint,
      // then send the request.
      const { requestOptions } = tunnelConfig;
      tunnelConfig.requestOptions = null;
      requestOptions.socket = socket;
      let tunneledSocket;
      const onTLSHandshakeError = (err) => {
        debug('Propagate error event from tunneled socket to tunnel socket');
        afterSocket(err, tunneledSocket);
      };
      tunneledSocket = tls.connect(requestOptions, () => {
        debug('TLS handshake over tunnel succeeded');
        tunneledSocket.removeListener('error', onTLSHandshakeError);
        afterSocket(null, tunneledSocket);
      });
      tunneledSocket.on('free', () => {
        debug('Propagate free event from tunneled socket to tunnel socket');
        socket.emit('free');
      });
      tunneledSocket.on('error', onTLSHandshakeError);
    }
    return headerEndIndex;
  }

  function onProxyEnd() {
    cleanup();
    const err = new ERR_PROXY_TUNNEL('Connection to establish proxy tunnel ended unexpectedly');
    afterSocket(err, socket);
  }

  const proxyTunnelTimeout = tunnelConfig.requestOptions.timeout;
  debug('proxyTunnelTimeout', proxyTunnelTimeout, options.timeout);
  // It may be worth a separate timeout error/event.
  // But it also makes sense to treat the tunnel establishment timeout as
  // a normal timeout for the request.
  function onProxyTimeout() {
    debug('onProxyTimeout', proxyTunnelTimeout);
    cleanup();
    const err = new ERR_PROXY_TUNNEL(`Connection to establish proxy tunnel timed out after ${proxyTunnelTimeout}ms`);
    err.proxyTunnelTimeout = proxyTunnelTimeout;
    afterSocket(err, socket);
  }

  if (proxyTunnelTimeout && proxyTunnelTimeout > 0) {
    debug('proxy tunnel setTimeout', proxyTunnelTimeout);
    socket.setTimeout(proxyTunnelTimeout, onProxyTimeout);
  }

  socket.on('error', onProxyError);
  socket.on('end', onProxyEnd);
  socket.write(proxyTunnelPayload);

  read();
}

// HTTPS agents.
// See ProxyConfig in internal/http.js for how the connection should be handled
// when the agent is configured to use a proxy server.
// Creates the underlying connection for an HTTPS request: a direct TLS
// connection normally, or a proxy connection that will later be upgraded via a
// CONNECT tunnel (see establishTunnel). Also wires up the TLS session cache
// when the agent supplied an _agentKey.
function createConnection(...args) {
  // XXX: This signature (port, host, options) is different from all the other
  // createConnection() methods.
  let options, cb;
  // Normalize the (port, host, options) / (options) argument shapes. Note the
  // first-argument object is used as-is while later positions are shallow-copied.
  if (args[0] !== null && typeof args[0] === 'object') {
    options = args[0];
  } else if (args[1] !== null && typeof args[1] === 'object') {
    options = { ...args[1] };
  } else if (args[2] === null || typeof args[2] !== 'object') {
    options = {};
  } else {
    options = { ...args[2] };
  }
  if (typeof args[0] === 'number') {
    options.port = args[0];
  }
  if (typeof args[1] === 'string') {
    options.host = args[1];
  }
  // The last argument, when a function, is the connection callback.
  if (typeof args[args.length - 1] === 'function') {
    cb = args[args.length - 1];
  }

  debug('createConnection', options);

  if (options._agentKey) {
    const session = this._getSession(options._agentKey);
    if (session) {
      debug('reuse session for %j', options._agentKey);
      // Spread after `session` so an explicit options.session still wins;
      // the cached session is only a default.
      options = {
        session,
        ...options,
      };
    }
  }

  let socket;
  const tunnelConfig = getTunnelConfigForProxiedHttps(this, options);
  debug(`https createConnection should use proxy for ${options.host}:${options.port}:`, tunnelConfig);

  if (!tunnelConfig) {
    // No proxy involved: connect straight to the origin over TLS.
    socket = tls.connect(options);
  } else {
    const connectOptions = {
      ...this[kProxyConfig].proxyConnectionOptions,
    };
    debug('Create proxy socket', connectOptions);
    const onError = (err) => {
      cleanupAndPropagate(err, socket);
    };
    const proxyTunnelTimeout = tunnelConfig.requestOptions.timeout;
    const onTimeout = () => {
      const err = new ERR_PROXY_TUNNEL(`Connection to establish proxy tunnel timed out after ${proxyTunnelTimeout}ms`);
      err.proxyTunnelTimeout = proxyTunnelTimeout;
      cleanupAndPropagate(err, socket);
    };
    // Wrapped in once() so the callback fires at most a single time no matter
    // which of error/timeout/tunnel-completion happens first.
    const cleanupAndPropagate = once((err, currentSocket) => {
      debug('cleanupAndPropagate', err);
      socket.removeListener('error', onError);
      socket.removeListener('timeout', onTimeout);
      // An error occurred during tunnel establishment, in that case just destroy the socket.
      // and propagate the error to the callback.

      // When the error comes from unexpected status code, the stream is still in good shape,
      // in that case let req.onSocket handle the destruction instead.
      if (err && err.code === 'ERR_PROXY_TUNNEL' && !err.statusCode) {
        socket.destroy();
      }
      // This error should go to:
      // -> oncreate in Agent.prototype.createSocket
      // -> closure in Agent.prototype.addRequest or Agent.prototype.removeSocket
      if (cb) {
        cb(err, currentSocket);
      }
    });
    const onProxyConnection = () => {
      socket.removeListener('error', onError);
      establishTunnel(this, socket, options, tunnelConfig, cleanupAndPropagate);
    };
    // Connect to the proxy itself in plain TCP or TLS depending on the
    // proxy URL's scheme.
    if (this[kProxyConfig].protocol === 'http:') {
      socket = net.connect(connectOptions, onProxyConnection);
    } else {
      socket = tls.connect(connectOptions, onProxyConnection);
    }

    socket.on('error', onError);
    if (proxyTunnelTimeout) {
      socket.setTimeout(proxyTunnelTimeout, onTimeout);
    }
    socket[kWaitForProxyTunnel] = true;
  }

  if (options._agentKey) {
    // Cache new session for reuse
    socket.on('session', (session) => {
      this._cacheSession(options._agentKey, session);
    });

    // Evict session on error
    socket.once('close', (err) => {
      if (err)
        this._evictSession(options._agentKey);
    });
  }

  return socket;
}

/**
 * Creates a new `HttpAgent` instance.
 * @param {{
 * keepAlive?: boolean;
 * keepAliveMsecs?: number;
 * maxSockets?: number;
 * maxTotalSockets?: number;
 * maxFreeSockets?: number;
 * scheduling?: string;
 * timeout?: number;
 * maxCachedSessions?: number;
 * servername?: string;
 * defaultPort?: number;
 * protocol?: string;
 * proxyEnv?: object;
 * }} [options]
 * @class
 */
function Agent(options) {
  if (!(this instanceof Agent))
    return new Agent(options);

  // Default to HTTPS settings before delegating to the base http Agent.
  options = { __proto__: null, ...options };
  options.defaultPort ??= 443;
  options.protocol ??= 'https:';
  FunctionPrototypeCall(HttpAgent, this, options);

  // TLS session cache size; 0 disables caching (see _cacheSession).
  this.maxCachedSessions = this.options.maxCachedSessions;
  if (this.maxCachedSessions === undefined)
    this.maxCachedSessions = 100;

  // FIFO cache: `list` tracks insertion order for eviction, `map` holds the
  // sessions keyed by agent key.
  this._sessionCache = {
    map: {},
    list: [],
  };
}
ObjectSetPrototypeOf(Agent.prototype, HttpAgent.prototype);
ObjectSetPrototypeOf(Agent, HttpAgent);
Agent.prototype.createConnection = createConnection;

/**
 * Gets a unique name for a set of options.
 * @param {{
 * host: string;
 * port: number;
 * localAddress: string;
 * family: number;
 * }} [options]
 * @returns {string}
 */
Agent.prototype.getName = function getName(options = kEmptyObject) {
  // Extend the base http Agent name with every TLS option that affects
  // connection reuse, so sockets are only pooled together when all of these
  // match. A ':' separator is appended even for absent options to keep the
  // positions stable.
  let name = FunctionPrototypeCall(HttpAgent.prototype.getName, this, options);

  name += ':';
  if (options.ca)
    name += options.ca;

  name += ':';
  if (options.cert)
    name += options.cert;

  name += ':';
  if (options.clientCertEngine)
    name += options.clientCertEngine;

  name += ':';
  if (options.ciphers)
    name += options.ciphers;

  name += ':';
  if (options.key)
    name += options.key;

  name += ':';
  if (options.pfx)
    name += options.pfx;

  name += ':';
  if (options.rejectUnauthorized !== undefined)
    name += options.rejectUnauthorized;

  name += ':';
  if (options.servername && options.servername !== options.host)
    name += options.servername;

  name += ':';
  if (options.minVersion)
    name += options.minVersion;

  name += ':';
  if (options.maxVersion)
    name += options.maxVersion;

  name += ':';
  if (options.secureProtocol)
    name += options.secureProtocol;

  name += ':';
  if (options.crl)
    name += options.crl;

  name += ':';
  if (options.honorCipherOrder !== undefined)
    name += options.honorCipherOrder;

  name += ':';
  if (options.ecdhCurve)
    name += options.ecdhCurve;

  name += ':';
  if (options.dhparam)
    name += options.dhparam;

  name += ':';
  if (options.secureOptions !== undefined)
    name += options.secureOptions;

  name += ':';
  if (options.sessionIdContext)
    name += options.sessionIdContext;

  name += ':';
  if (options.sigalgs)
    name += JSONStringify(options.sigalgs);

  name += ':';
  if (options.privateKeyIdentifier)
    name += options.privateKeyIdentifier;

  name += ':';
  if (options.privateKeyEngine)
    name += options.privateKeyEngine;

  return name;
};

// Looks up a cached TLS session for the given agent key, or undefined.
Agent.prototype._getSession = function _getSession(key) {
  return this._sessionCache.map[key];
};
+Agent.prototype._cacheSession = function _cacheSession(key, session) { + // Cache is disabled + if (this.maxCachedSessions === 0) + return; + + // Fast case - update existing entry + if (this._sessionCache.map[key]) { + this._sessionCache.map[key] = session; + return; + } + + // Put new entry + if (this._sessionCache.list.length >= this.maxCachedSessions) { + const oldKey = ArrayPrototypeShift(this._sessionCache.list); + debug('evicting %j', oldKey); + delete this._sessionCache.map[oldKey]; + } + + ArrayPrototypePush(this._sessionCache.list, key); + this._sessionCache.map[key] = session; +}; + +Agent.prototype._evictSession = function _evictSession(key) { + const index = ArrayPrototypeIndexOf(this._sessionCache.list, key); + if (index === -1) + return; + + ArrayPrototypeSplice(this._sessionCache.list, index, 1); + delete this._sessionCache.map[key]; +}; + +const globalAgent = new Agent({ + keepAlive: true, scheduling: 'lifo', timeout: 5000, + // This normalized from both --use-env-proxy and NODE_USE_ENV_PROXY settings. + proxyEnv: getOptionValue('--use-env-proxy') ? filterEnvForProxies(process.env) : undefined, +}); + +/** + * Makes a request to a secure web server. + * @param {...any} args + * @returns {ClientRequest} + */ +function request(...args) { + let options = {}; + + if (typeof args[0] === 'string') { + const urlStr = ArrayPrototypeShift(args); + options = urlToHttpOptions(new URL(urlStr)); + } else if (isURL(args[0])) { + options = urlToHttpOptions(ArrayPrototypeShift(args)); + } + + if (args[0] && typeof args[0] !== 'function') { + ObjectAssign(options, ArrayPrototypeShift(args)); + } + + options._defaultAgent = module.exports.globalAgent; + ArrayPrototypeUnshift(args, options); + + return ReflectConstruct(ClientRequest, args); +} + +/** + * Makes a GET request to a secure web server. 
+ * @param {string | URL} input + * @param {{ + * agent?: Agent | boolean; + * auth?: string; + * createConnection?: Function; + * defaultPort?: number; + * family?: number; + * headers?: object; + * hints?: number; + * host?: string; + * hostname?: string; + * insecureHTTPParser?: boolean; + * joinDuplicateHeaders?: boolean; + * localAddress?: string; + * localPort?: number; + * lookup?: Function; + * maxHeaderSize?: number; + * method?: string; + * path?: string; + * port?: number; + * protocol?: string; + * setHost?: boolean; + * socketPath?: string; + * timeout?: number; + * signal?: AbortSignal; + * uniqueHeaders?: Array; + * } | string | URL} [options] + * @param {Function} [cb] + * @returns {ClientRequest} + */ +function get(input, options, cb) { + const req = request(input, options, cb); + req.end(); + return req; +} + +module.exports = { + Agent, + globalAgent, + Server, + createServer, + get, + request, +}; \ No newline at end of file diff --git a/node/inspector.js b/node/inspector.js new file mode 100644 index 00000000..0cc41235 --- /dev/null +++ b/node/inspector.js @@ -0,0 +1,240 @@ +"use strict"; + +const { JSONParse, JSONStringify, SafeMap, SymbolDispose } = primordials; + +const { + ERR_INSPECTOR_ALREADY_ACTIVATED, + ERR_INSPECTOR_ALREADY_CONNECTED, + ERR_INSPECTOR_CLOSED, + ERR_INSPECTOR_COMMAND, + ERR_INSPECTOR_NOT_AVAILABLE, + ERR_INSPECTOR_NOT_CONNECTED, + ERR_INSPECTOR_NOT_ACTIVE, + ERR_INSPECTOR_NOT_WORKER, +} = require("internal/errors").codes; + +const { isLoopback } = require("internal/net"); + +const { hasInspector } = internalBinding("config"); +if (!hasInspector) throw new ERR_INSPECTOR_NOT_AVAILABLE(); + +const EventEmitter = require("events"); +const { queueMicrotask } = require("internal/process/task_queues"); +const { kEmptyObject } = require("internal/util"); +const { + isUint32, + validateFunction, + validateInt32, + validateObject, + validateString, +} = require("internal/validators"); +const { isMainThread } = 
require("worker_threads"); +const { _debugEnd } = internalBinding("process_methods"); +const { put } = require("internal/inspector/network_resources"); + +const { + Connection, + MainThreadConnection, + open, + url, + isEnabled, + waitForDebugger, + console, + emitProtocolEvent, +} = internalBinding("inspector"); + +class Session extends EventEmitter { + #connection = null; + #nextId = 1; + #messageCallbacks = new SafeMap(); + + /** + * Connects the session to the inspector back-end. + * @returns {void} + */ + connect() { + if (this.#connection) + throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session"); + this.#connection = new Connection((message) => this.#onMessage(message)); + } + + /** + * Connects the session to the main thread + * inspector back-end. + * @returns {void} + */ + connectToMainThread() { + if (isMainThread) throw new ERR_INSPECTOR_NOT_WORKER(); + if (this.#connection) + throw new ERR_INSPECTOR_ALREADY_CONNECTED("The inspector session"); + this.#connection = new MainThreadConnection((message) => + queueMicrotask(() => this.#onMessage(message)) + ); + } + + #onMessage(message) { + const parsed = JSONParse(message); + try { + if (parsed.id) { + const callback = this.#messageCallbacks.get(parsed.id); + this.#messageCallbacks.delete(parsed.id); + if (callback) { + if (parsed.error) { + return callback( + new ERR_INSPECTOR_COMMAND(parsed.error.code, parsed.error.message) + ); + } + + callback(null, parsed.result); + } + } else { + this.emit(parsed.method, parsed); + this.emit("inspectorNotification", parsed); + } + } catch (error) { + process.emitWarning(error); + } + } + + /** + * Posts a message to the inspector back-end. 
+ * @param {string} method + * @param {Record} [params] + * @param {Function} [callback] + * @returns {void} + */ + post(method, params, callback) { + validateString(method, "method"); + if (!callback && typeof params === "function") { + callback = params; + params = null; + } + if (params) { + validateObject(params, "params"); + } + if (callback) { + validateFunction(callback, "callback"); + } + + if (!this.#connection) { + throw new ERR_INSPECTOR_NOT_CONNECTED(); + } + const id = this.#nextId++; + const message = { id, method }; + if (params) { + message.params = params; + } + if (callback) { + this.#messageCallbacks.set(id, callback); + } + this.#connection.dispatch(JSONStringify(message)); + } + + /** + * Immediately closes the session, all pending + * message callbacks will be called with an + * error. + * @returns {void} + */ + disconnect() { + if (!this.#connection) return; + this.#connection.disconnect(); + this.#connection = null; + const remainingCallbacks = this.#messageCallbacks.values(); + for (const callback of remainingCallbacks) { + process.nextTick(callback, new ERR_INSPECTOR_CLOSED()); + } + this.#messageCallbacks.clear(); + this.#nextId = 1; + } +} + +/** + * Activates inspector on host and port. + * @param {number} [port] + * @param {string} [host] + * @param {boolean} [wait] + * @returns {void} + */ +function inspectorOpen(port, host, wait) { + if (isEnabled()) { + throw new ERR_INSPECTOR_ALREADY_ACTIVATED(); + } + // inspectorOpen() currently does not typecheck its arguments and adding + // such checks would be a potentially breaking change. However, the native + // open() function requires the port to fit into a 16-bit unsigned integer, + // causing an integer overflow otherwise, so we at least need to prevent that. 
+ if (isUint32(port)) { + validateInt32(port, "port", 0, 65535); + } + if (host && !isLoopback(host)) { + process.emitWarning( + "Binding the inspector to a public IP with an open port is insecure, " + + "as it allows external hosts to connect to the inspector " + + "and perform a remote code execution attack. " + + "Documentation can be found at " + + "https://nodejs.org/api/cli.html#--inspecthostport", + "SecurityWarning" + ); + } + + open(port, host); + if (wait) waitForDebugger(); + + return { + __proto__: null, + [SymbolDispose]() { + _debugEnd(); + }, + }; +} + +/** + * Blocks until a client (existing or connected later) + * has sent the `Runtime.runIfWaitingForDebugger` + * command. + * @returns {void} + */ +function inspectorWaitForDebugger() { + if (!waitForDebugger()) throw new ERR_INSPECTOR_NOT_ACTIVE(); +} + +function broadcastToFrontend(eventName, params = kEmptyObject) { + validateString(eventName, "eventName"); + validateObject(params, "params"); + emitProtocolEvent(eventName, params); +} + +const Network = { + requestWillBeSent: (params) => + broadcastToFrontend("Network.requestWillBeSent", params), + responseReceived: (params) => + broadcastToFrontend("Network.responseReceived", params), + loadingFinished: (params) => + broadcastToFrontend("Network.loadingFinished", params), + loadingFailed: (params) => + broadcastToFrontend("Network.loadingFailed", params), + dataSent: (params) => broadcastToFrontend("Network.dataSent", params), + dataReceived: (params) => broadcastToFrontend("Network.dataReceived", params), + webSocketCreated: (params) => + broadcastToFrontend("Network.webSocketCreated", params), + webSocketClosed: (params) => + broadcastToFrontend("Network.webSocketClosed", params), + webSocketHandshakeResponseReceived: (params) => + broadcastToFrontend("Network.webSocketHandshakeResponseReceived", params), +}; + +const NetworkResources = { + put, +}; + +module.exports = { + open: inspectorOpen, + close: _debugEnd, + url, + waitForDebugger: 
inspectorWaitForDebugger, + console, + Session, + Network, + NetworkResources, +}; diff --git a/node/net.js b/node/net.js new file mode 100644 index 00000000..d43f7d8a --- /dev/null +++ b/node/net.js @@ -0,0 +1,2650 @@ +"use strict"; + +const { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + Boolean, + FunctionPrototypeBind, + FunctionPrototypeCall, + MathMax, + Number, + NumberIsNaN, + NumberParseInt, + ObjectDefineProperty, + ObjectSetPrototypeOf, + Symbol, + SymbolAsyncDispose, + SymbolDispose, +} = primordials; + +const EventEmitter = require("events"); +const { addAbortListener } = require("internal/events/abort_listener"); +const stream = require("stream"); +let debug = require("internal/util/debuglog").debuglog("net", (fn) => { + debug = fn; +}); +const { + kReinitializeHandle, + isIP, + isIPv4, + isIPv6, + normalizedArgsSymbol, + makeSyncWrite, +} = require("internal/net"); +const assert = require("internal/assert"); +const { UV_EADDRINUSE, UV_EINVAL, UV_ENOTCONN, UV_ECANCELED, UV_ETIMEDOUT } = + internalBinding("uv"); +const { convertIpv6StringToBuffer } = internalBinding("cares_wrap"); + +const { Buffer } = require("buffer"); +const { ShutdownWrap } = internalBinding("stream_wrap"); +const { + TCP, + TCPConnectWrap, + constants: TCPConstants, +} = internalBinding("tcp_wrap"); +const { + Pipe, + PipeConnectWrap, + constants: PipeConstants, +} = internalBinding("pipe_wrap"); +const { + newAsyncId, + defaultTriggerAsyncIdScope, + symbols: { async_id_symbol, owner_symbol }, +} = require("internal/async_hooks"); +const { + writevGeneric, + writeGeneric, + onStreamRead, + kAfterAsyncWrite, + kHandle, + kUpdateTimer, + setStreamTimeout, + kBuffer, + kBufferCb, + kBufferGen, +} = require("internal/stream_base_commons"); +const { + ErrnoException, + ExceptionWithHostPort, + NodeAggregateError, + UVExceptionWithHostPort, + codes: { + ERR_INVALID_ADDRESS_FAMILY, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + 
ERR_INVALID_FD_TYPE, + ERR_INVALID_HANDLE_TYPE, + ERR_INVALID_IP_ADDRESS, + ERR_IP_BLOCKED, + ERR_MISSING_ARGS, + ERR_SERVER_ALREADY_LISTEN, + ERR_SERVER_NOT_RUNNING, + ERR_SOCKET_CLOSED, + ERR_SOCKET_CLOSED_BEFORE_CONNECTION, + ERR_SOCKET_CONNECTION_TIMEOUT, + }, + genericNodeError, +} = require("internal/errors"); +const { isUint8Array } = require("internal/util/types"); +const { queueMicrotask } = require("internal/process/task_queues"); +const { + guessHandleType, + isWindows, + kEmptyObject, + promisify, +} = require("internal/util"); +const { + validateAbortSignal, + validateBoolean, + validateFunction, + validateInt32, + validateNumber, + validatePort, + validateString, +} = require("internal/validators"); +const kLastWriteQueueSize = Symbol("lastWriteQueueSize"); +const { getOptionValue } = require("internal/options"); + +// Lazy loaded to improve startup performance. +let cluster; +let dns; +let BlockList; +let SocketAddress; +let autoSelectFamilyDefault = getOptionValue("--network-family-autoselection"); +let autoSelectFamilyAttemptTimeoutDefault = getOptionValue( + "--network-family-autoselection-attempt-timeout" +); + +const { clearTimeout, setTimeout } = require("timers"); +const { kTimeout } = require("internal/timers"); + +const DEFAULT_IPV4_ADDR = "0.0.0.0"; +const DEFAULT_IPV6_ADDR = "::"; + +const noop = () => {}; + +const kPerfHooksNetConnectContext = Symbol("kPerfHooksNetConnectContext"); + +const dc = require("diagnostics_channel"); +const netClientSocketChannel = dc.channel("net.client.socket"); +const netServerSocketChannel = dc.channel("net.server.socket"); +const netServerListen = dc.tracingChannel("net.server.listen"); + +const { hasObserver, startPerf, stopPerf } = require("internal/perf/observe"); +const { getDefaultHighWaterMark } = require("internal/streams/state"); + +function getFlags(options) { + let flags = 0; + if (options.ipv6Only === true) { + flags |= TCPConstants.UV_TCP_IPV6ONLY; + } + if (options.reusePort === true) { + flags 
|= TCPConstants.UV_TCP_REUSEPORT; + } + return flags; +} + +function createHandle(fd, is_server) { + validateInt32(fd, "fd", 0); + const type = guessHandleType(fd); + if (type === "PIPE") { + return new Pipe(is_server ? PipeConstants.SERVER : PipeConstants.SOCKET); + } + + if (type === "TCP") { + return new TCP(is_server ? TCPConstants.SERVER : TCPConstants.SOCKET); + } + + throw new ERR_INVALID_FD_TYPE(type); +} + +function getNewAsyncId(handle) { + return !handle || typeof handle.getAsyncId !== "function" + ? newAsyncId() + : handle.getAsyncId(); +} + +function isPipeName(s) { + return typeof s === "string" && toNumber(s) === false; +} + +/** + * Creates a new TCP or IPC server + * @param {{ + * allowHalfOpen?: boolean; + * pauseOnConnect?: boolean; + * }} [options] + * @param {Function} [connectionListener] + * @returns {Server} + */ + +function createServer(options, connectionListener) { + return new Server(options, connectionListener); +} + +// Target API: +// +// let s = net.connect({port: 80, host: 'google.com'}, function() { +// ... 
+// }); +// +// There are various forms: +// +// connect(options, [cb]) +// connect(port, [host], [cb]) +// connect(path, [cb]); +// +function connect(...args) { + const normalized = normalizeArgs(args); + const options = normalized[0]; + debug("createConnection", normalized); + const socket = new Socket(options); + + if (options.timeout) { + socket.setTimeout(options.timeout); + } + + return socket.connect(normalized); +} + +function getDefaultAutoSelectFamily() { + return autoSelectFamilyDefault; +} + +function setDefaultAutoSelectFamily(value) { + validateBoolean(value, "value"); + autoSelectFamilyDefault = value; +} + +function getDefaultAutoSelectFamilyAttemptTimeout() { + return autoSelectFamilyAttemptTimeoutDefault; +} + +function setDefaultAutoSelectFamilyAttemptTimeout(value) { + validateInt32(value, "value", 1); + + if (value < 10) { + value = 10; + } + + autoSelectFamilyAttemptTimeoutDefault = value; +} + +// Returns an array [options, cb], where options is an object, +// cb is either a function or null. +// Used to normalize arguments of Socket.prototype.connect() and +// Server.prototype.listen(). Possible combinations of parameters: +// (options[...][, cb]) +// (path[...][, cb]) +// ([port][, host][...][, cb]) +// For Socket.prototype.connect(), the [...] part is ignored +// For Server.prototype.listen(), the [...] 
part is [, backlog] +// but will not be handled here (handled in listen()) +function normalizeArgs(args) { + let arr; + + if (args.length === 0) { + arr = [{}, null]; + arr[normalizedArgsSymbol] = true; + return arr; + } + + const arg0 = args[0]; + let options = {}; + if (typeof arg0 === "object" && arg0 !== null) { + // (options[...][, cb]) + options = arg0; + } else if (isPipeName(arg0)) { + // (path[...][, cb]) + options.path = arg0; + } else { + // ([port][, host][...][, cb]) + options.port = arg0; + if (args.length > 1 && typeof args[1] === "string") { + options.host = args[1]; + } + } + + const cb = args[args.length - 1]; + if (typeof cb !== "function") arr = [options, null]; + else arr = [options, cb]; + + arr[normalizedArgsSymbol] = true; + return arr; +} + +// Called when creating new Socket, or when re-using a closed Socket +function initSocketHandle(self) { + self._undestroy(); + self._sockname = null; + + // Handle creation may be deferred to bind() or connect() time. + if (self._handle) { + self._handle[owner_symbol] = self; + self._handle.onread = onStreamRead; + self[async_id_symbol] = getNewAsyncId(self._handle); + + let userBuf = self[kBuffer]; + if (userBuf) { + const bufGen = self[kBufferGen]; + if (bufGen !== null) { + userBuf = bufGen(); + if (!isUint8Array(userBuf)) return; + self[kBuffer] = userBuf; + } + self._handle.useUserBuffer(userBuf); + } + } +} + +function closeSocketHandle(self, isException, isCleanupPending = false) { + if (self._handle) { + self._handle.close(() => { + debug("emit close"); + self.emit("close", isException); + if (isCleanupPending) { + self._handle.onread = noop; + self._handle = null; + self._sockname = null; + } + }); + } +} + +const kBytesRead = Symbol("kBytesRead"); +const kBytesWritten = Symbol("kBytesWritten"); +const kSetNoDelay = Symbol("kSetNoDelay"); +const kSetKeepAlive = Symbol("kSetKeepAlive"); +const kSetKeepAliveInitialDelay = Symbol("kSetKeepAliveInitialDelay"); + +function Socket(options) { + if 
(!(this instanceof Socket)) return new Socket(options); + if (options?.objectMode) { + throw new ERR_INVALID_ARG_VALUE( + "options.objectMode", + options.objectMode, + "is not supported" + ); + } else if (options?.readableObjectMode || options?.writableObjectMode) { + throw new ERR_INVALID_ARG_VALUE( + `options.${ + options.readableObjectMode ? "readableObjectMode" : "writableObjectMode" + }`, + options.readableObjectMode || options.writableObjectMode, + "is not supported" + ); + } + if (options?.keepAliveInitialDelay !== undefined) { + validateNumber( + options?.keepAliveInitialDelay, + "options.keepAliveInitialDelay" + ); + + if (options.keepAliveInitialDelay < 0) { + options.keepAliveInitialDelay = 0; + } + } + + this.connecting = false; + // Problem with this is that users can supply their own handle, that may not + // have _handle.getAsyncId(). In this case an[async_id_symbol] should + // probably be supplied by async_hooks. + this[async_id_symbol] = -1; + this._hadError = false; + this[kHandle] = null; + this._parent = null; + this._host = null; + this[kLastWriteQueueSize] = 0; + this[kTimeout] = null; + this[kBuffer] = null; + this[kBufferCb] = null; + this[kBufferGen] = null; + this._closeAfterHandlingError = false; + + if (typeof options === "number") + options = { fd: options }; // Legacy interface. + else options = { ...options }; + + // Default to *not* allowing half open sockets. + options.allowHalfOpen = Boolean(options.allowHalfOpen); + // For backwards compat do not emit close on destroy. + options.emitClose = false; + options.autoDestroy = true; + // Handle strings directly. 
+ options.decodeStrings = false; + stream.Duplex.call(this, options); + + if (options.handle) { + this._handle = options.handle; // private + this[async_id_symbol] = getNewAsyncId(this._handle); + } else if (options.fd !== undefined) { + const { fd } = options; + let err; + + // createHandle will throw ERR_INVALID_FD_TYPE if `fd` is not + // a valid `PIPE` or `TCP` descriptor + this._handle = createHandle(fd, false); + + err = this._handle.open(fd); + + // While difficult to fabricate, in some architectures + // `open` may return an error code for valid file descriptors + // which cannot be opened. This is difficult to test as most + // un-openable fds will throw on `createHandle` + if (err) throw new ErrnoException(err, "open"); + + this[async_id_symbol] = this._handle.getAsyncId(); + + if ((fd === 1 || fd === 2) && this._handle instanceof Pipe && isWindows) { + // Make stdout and stderr blocking on Windows + err = this._handle.setBlocking(true); + if (err) throw new ErrnoException(err, "setBlocking"); + + this._writev = null; + this._write = makeSyncWrite(fd); + // makeSyncWrite adjusts this value like the original handle would, so + // we need to let it do that by turning it into a writable, own + // property. + ObjectDefineProperty(this._handle, "bytesWritten", { + __proto__: null, + value: 0, + writable: true, + }); + } + } + + const onread = options.onread; + if ( + onread !== null && + typeof onread === "object" && + (isUint8Array(onread.buffer) || typeof onread.buffer === "function") && + typeof onread.callback === "function" + ) { + if (typeof onread.buffer === "function") { + this[kBuffer] = true; + this[kBufferGen] = onread.buffer; + } else { + this[kBuffer] = onread.buffer; + } + this[kBufferCb] = onread.callback; + } + + this[kSetNoDelay] = Boolean(options.noDelay); + this[kSetKeepAlive] = Boolean(options.keepAlive); + this[kSetKeepAliveInitialDelay] = ~~(options.keepAliveInitialDelay / 1000); + + // Shut down the socket when we're finished with it. 
+ this.on("end", onReadableStreamEnd); + + initSocketHandle(this); + + this._pendingData = null; + this._pendingEncoding = ""; + + // If we have a handle, then start the flow of data into the + // buffer. if not, then this will happen when we connect + if (this._handle && options.readable !== false) { + if (options.pauseOnCreate) { + // Stop the handle from reading and pause the stream + this._handle.reading = false; + this._handle.readStop(); + this.readableFlowing = false; + } else if (!options.manualStart) { + this.read(0); + } + } + + if (options.signal) { + addClientAbortSignalOption(this, options); + } + + // Reserve properties + this.server = null; + this._server = null; + + // Used after `.destroy()` + this[kBytesRead] = 0; + this[kBytesWritten] = 0; + if (options.blockList) { + if (!module.exports.BlockList.isBlockList(options.blockList)) { + throw new ERR_INVALID_ARG_TYPE( + "options.blockList", + "net.BlockList", + options.blockList + ); + } + this.blockList = options.blockList; + } +} +ObjectSetPrototypeOf(Socket.prototype, stream.Duplex.prototype); +ObjectSetPrototypeOf(Socket, stream.Duplex); + +// Refresh existing timeouts. +Socket.prototype._unrefTimer = function _unrefTimer() { + for (let s = this; s !== null; s = s._parent) { + if (s[kTimeout]) s[kTimeout].refresh(); + } +}; + +// The user has called .end(), and all the bytes have been +// sent out to the other side. 
Socket.prototype._final = function (cb) {
  // If still connecting - defer handling `_final` until 'connect' will happen
  if (this.connecting) {
    debug("_final: not yet connected");
    return this.once("connect", () => this._final(cb));
  }

  if (!this._handle) return cb();

  debug("_final: not ended, call shutdown()");

  const req = new ShutdownWrap();
  req.oncomplete = afterShutdown;
  req.handle = this._handle;
  req.callback = cb;
  const err = this._handle.shutdown(req);

  if (err === 1 || err === UV_ENOTCONN) {
    // Shutdown finished synchronously.
    return cb();
  } else if (err !== 0) {
    return cb(new ErrnoException(err, "shutdown"));
  }
};

// Completion callback for the ShutdownWrap issued in `_final`.
function afterShutdown() {
  const self = this.handle[owner_symbol];

  debug("afterShutdown destroyed=%j", self.destroyed);

  this.callback();
}

// Provide a better error message when we call end() as a result
// of the other side sending a FIN. The standard 'write after end'
// is overly vague, and makes it seem like the user's code is to blame.
function writeAfterFIN(chunk, encoding, cb) {
  if (!this.writableEnded) {
    return stream.Duplex.prototype.write.call(this, chunk, encoding, cb);
  }

  if (typeof encoding === "function") {
    cb = encoding;
    encoding = null;
  }

  const er = genericNodeError("This socket has been ended by the other party", {
    code: "EPIPE",
  });
  if (typeof cb === "function") {
    defaultTriggerAsyncIdScope(this[async_id_symbol], process.nextTick, cb, er);
  }
  this.destroy(er);

  return false;
}

Socket.prototype.setTimeout = setStreamTimeout;

// Inactivity-timer callback: suppress the timeout while a write is
// still draining into the kernel.
Socket.prototype._onTimeout = function () {
  const handle = this._handle;
  const lastWriteQueueSize = this[kLastWriteQueueSize];
  if (lastWriteQueueSize > 0 && handle) {
    // `lastWriteQueueSize !== writeQueueSize` means there is
    // an active write in progress, so we suppress the timeout.
    const { writeQueueSize } = handle;
    if (lastWriteQueueSize !== writeQueueSize) {
      this[kLastWriteQueueSize] = writeQueueSize;
      this._unrefTimer();
      return;
    }
  }
  debug("_onTimeout");
  this.emit("timeout");
};

// Toggle Nagle's algorithm; `enable` defaults to true when omitted.
Socket.prototype.setNoDelay = function (enable) {
  // Backwards compatibility: assume true when `enable` is omitted
  enable = Boolean(enable === undefined ? true : enable);

  // No handle yet: remember the preference for when one is created.
  if (!this._handle) {
    this[kSetNoDelay] = enable;
    return this;
  }

  if (this._handle.setNoDelay && enable !== this[kSetNoDelay]) {
    this[kSetNoDelay] = enable;
    this._handle.setNoDelay(enable);
  }

  return this;
};

// Configure TCP keep-alive; the initial delay is truncated to whole seconds.
Socket.prototype.setKeepAlive = function (enable, initialDelayMsecs) {
  enable = Boolean(enable);
  const initialDelay = ~~(initialDelayMsecs / 1000);

  if (!this._handle) {
    this[kSetKeepAlive] = enable;
    this[kSetKeepAliveInitialDelay] = initialDelay;
    return this;
  }

  if (!this._handle.setKeepAlive) {
    return this;
  }

  // Only touch the handle when something actually changed.
  if (
    enable !== this[kSetKeepAlive] ||
    (enable && this[kSetKeepAliveInitialDelay] !== initialDelay)
  ) {
    this[kSetKeepAlive] = enable;
    this[kSetKeepAliveInitialDelay] = initialDelay;
    this._handle.setKeepAlive(enable, initialDelay);
  }

  return this;
};

Socket.prototype.address = function () {
  return this._getsockname();
};

// Legacy alias for `connecting`.
ObjectDefineProperty(Socket.prototype, "_connecting", {
  __proto__: null,
  get: function () {
    return this.connecting;
  },
});

ObjectDefineProperty(Socket.prototype, "pending", {
  __proto__: null,
  get() {
    return !this._handle || this.connecting;
  },
  configurable: true,
});

ObjectDefineProperty(Socket.prototype, "readyState", {
  __proto__: null,
  get: function () {
    if (this.connecting) {
      return "opening";
    } else if (this.readable && this.writable) {
      return "open";
    } else if (this.readable && !this.writable) {
      return "readOnly";
    } else if (!this.readable && this.writable) {
      return "writeOnly";
    }
    return "closed";
  },
});

+ObjectDefineProperty(Socket.prototype, "bufferSize", { + __proto__: null, + get: function () { + if (this._handle) { + return this.writableLength; + } + }, +}); + +ObjectDefineProperty(Socket.prototype, kUpdateTimer, { + __proto__: null, + get: function () { + return this._unrefTimer; + }, +}); + +function tryReadStart(socket) { + // Not already reading, start the flow + debug("Socket._handle.readStart"); + socket._handle.reading = true; + const err = socket._handle.readStart(); + if (err) socket.destroy(new ErrnoException(err, "read")); +} + +// Just call handle.readStart until we have enough in the buffer +Socket.prototype._read = function (n) { + debug( + "_read - n", + n, + "isConnecting?", + !!this.connecting, + "hasHandle?", + !!this._handle + ); + + if (this.connecting || !this._handle) { + debug("_read wait for connection"); + this.once("connect", () => this._read(n)); + } else if (!this._handle.reading) { + tryReadStart(this); + } +}; + +Socket.prototype.end = function (data, encoding, callback) { + stream.Duplex.prototype.end.call(this, data, encoding, callback); + return this; +}; + +Socket.prototype.resetAndDestroy = function () { + if (this._handle) { + if (!(this._handle instanceof TCP)) throw new ERR_INVALID_HANDLE_TYPE(); + if (this.connecting) { + debug("reset wait for connection"); + this.once("connect", () => this._reset()); + } else { + this._reset(); + } + } else { + this.destroy(new ERR_SOCKET_CLOSED()); + } + return this; +}; + +Socket.prototype.pause = function () { + if (this[kBuffer] && !this.connecting && this._handle?.reading) { + this._handle.reading = false; + if (!this.destroyed) { + const err = this._handle.readStop(); + if (err) this.destroy(new ErrnoException(err, "read")); + } + } + return stream.Duplex.prototype.pause.call(this); +}; + +Socket.prototype.resume = function () { + if ( + this[kBuffer] && + !this.connecting && + this._handle && + !this._handle.reading + ) { + tryReadStart(this); + } + return 
stream.Duplex.prototype.resume.call(this); +}; + +Socket.prototype.read = function (n) { + if ( + this[kBuffer] && + !this.connecting && + this._handle && + !this._handle.reading + ) { + tryReadStart(this); + } + return stream.Duplex.prototype.read.call(this, n); +}; + +// Called when the 'end' event is emitted. +function onReadableStreamEnd() { + if (!this.allowHalfOpen) { + this.write = writeAfterFIN; + } +} + +Socket.prototype.destroySoon = function () { + if (this.writable) this.end(); + + if (this.writableFinished) this.destroy(); + else this.once("finish", this.destroy); +}; + +Socket.prototype._destroy = function (exception, cb) { + debug("destroy"); + + this.connecting = false; + + for (let s = this; s !== null; s = s._parent) { + clearTimeout(s[kTimeout]); + } + + debug("close"); + if (this._handle) { + if (this !== process.stderr) debug("close handle"); + const isException = exception ? true : false; + // `bytesRead` and `kBytesWritten` should be accessible after `.destroy()` + this[kBytesRead] = this._handle.bytesRead; + this[kBytesWritten] = this._handle.bytesWritten; + + if (this.resetAndClosing) { + this.resetAndClosing = false; + const err = this._handle.reset(() => { + debug("emit close"); + this.emit("close", isException); + }); + if (err) this.emit("error", new ErrnoException(err, "reset")); + } else if (this._closeAfterHandlingError) { + // Enqueue closing the socket as a microtask, so that the socket can be + // accessible when an `error` event is handled in the `next tick queue`. 
+ queueMicrotask(() => closeSocketHandle(this, isException, true)); + } else { + closeSocketHandle(this, isException); + } + + if (!this._closeAfterHandlingError) { + this._handle.onread = noop; + this._handle = null; + this._sockname = null; + } + cb(exception); + } else { + cb(exception); + process.nextTick(emitCloseNT, this); + } + + if (this._server) { + debug("has server"); + this._server._connections--; + if (this._server._emitCloseIfDrained) { + this._server._emitCloseIfDrained(); + } + } +}; + +Socket.prototype._reset = function () { + debug("reset connection"); + this.resetAndClosing = true; + return this.destroy(); +}; + +Socket.prototype._getpeername = function () { + if (!this._handle || !this._handle.getpeername || this.connecting) { + return this._peername || {}; + } else if (!this._peername) { + const out = {}; + const err = this._handle.getpeername(out); + if (err) return out; + this._peername = out; + } + return this._peername; +}; + +function protoGetter(name, callback) { + ObjectDefineProperty(Socket.prototype, name, { + __proto__: null, + configurable: false, + enumerable: true, + get: callback, + }); +} + +protoGetter("bytesRead", function bytesRead() { + return this._handle ? this._handle.bytesRead : this[kBytesRead]; +}); + +protoGetter("remoteAddress", function remoteAddress() { + return this._getpeername().address; +}); + +protoGetter("remoteFamily", function remoteFamily() { + return this._getpeername().family; +}); + +protoGetter("remotePort", function remotePort() { + return this._getpeername().port; +}); + +Socket.prototype._getsockname = function () { + if (!this._handle || !this._handle.getsockname) { + return {}; + } else if (!this._sockname) { + this._sockname = {}; + // FIXME(bnoordhuis) Throw when the return value is not 0? 
+ this._handle.getsockname(this._sockname); + } + return this._sockname; +}; + +protoGetter("localAddress", function localAddress() { + return this._getsockname().address; +}); + +protoGetter("localPort", function localPort() { + return this._getsockname().port; +}); + +protoGetter("localFamily", function localFamily() { + return this._getsockname().family; +}); + +Socket.prototype[kAfterAsyncWrite] = function () { + this[kLastWriteQueueSize] = 0; +}; + +Socket.prototype._writeGeneric = function (writev, data, encoding, cb) { + // If we are still connecting, then buffer this for later. + // The Writable logic will buffer up any more writes while + // waiting for this one to be done. + if (this.connecting) { + this._pendingData = data; + this._pendingEncoding = encoding; + this.once("connect", function connect() { + this.off("close", onClose); + this._writeGeneric(writev, data, encoding, cb); + }); + function onClose() { + cb(new ERR_SOCKET_CLOSED_BEFORE_CONNECTION()); + } + this.once("close", onClose); + return; + } + this._pendingData = null; + this._pendingEncoding = ""; + + if (!this._handle) { + cb(new ERR_SOCKET_CLOSED()); + return false; + } + + this._unrefTimer(); + + let req; + if (writev) req = writevGeneric(this, data, cb); + else req = writeGeneric(this, data, encoding, cb); + if (req.async) this[kLastWriteQueueSize] = req.bytes; +}; + +Socket.prototype._writev = function (chunks, cb) { + this._writeGeneric(true, chunks, "", cb); +}; + +Socket.prototype._write = function (data, encoding, cb) { + this._writeGeneric(false, data, encoding, cb); +}; + +// Legacy alias. Having this is probably being overly cautious, but it doesn't +// really hurt anyone either. This can probably be removed safely if desired. +protoGetter("_bytesDispatched", function _bytesDispatched() { + return this._handle ? 
this._handle.bytesWritten : this[kBytesWritten]; +}); + +protoGetter("bytesWritten", function bytesWritten() { + let bytes = this._bytesDispatched; + const data = this._pendingData; + const encoding = this._pendingEncoding; + const writableBuffer = this.writableBuffer; + + if (!writableBuffer) return undefined; + + for (const el of writableBuffer) { + bytes += + el.chunk instanceof Buffer + ? el.chunk.length + : Buffer.byteLength(el.chunk, el.encoding); + } + + if (ArrayIsArray(data)) { + // Was a writev, iterate over chunks to get total length + for (let i = 0; i < data.length; i++) { + const chunk = data[i]; + + if (data.allBuffers || chunk instanceof Buffer) bytes += chunk.length; + else bytes += Buffer.byteLength(chunk.chunk, chunk.encoding); + } + } else if (data) { + // Writes are either a string or a Buffer. + if (typeof data !== "string") bytes += data.length; + else bytes += Buffer.byteLength(data, encoding); + } + + return bytes; +}); + +function checkBindError(err, port, handle) { + // EADDRINUSE may not be reported until we call listen() or connect(). + // To complicate matters, a failed bind() followed by listen() or connect() + // will implicitly bind to a random port. Ergo, check that the socket is + // bound to the expected port before calling listen() or connect(). + // + // FIXME(bnoordhuis) Doesn't work for pipe handles, they don't have a + // getsockname() method. Non-issue for now, the cluster module doesn't + // really support pipes anyway. + if (err === 0 && port > 0 && handle.getsockname) { + const out = {}; + err = handle.getsockname(out); + if (err === 0 && port !== out.port) { + debug(`checkBindError, bound to ${out.port} instead of ${port}`); + err = UV_EADDRINUSE; + } + } + return err; +} + +function internalConnect( + self, + address, + port, + addressType, + localAddress, + localPort, + flags +) { + // TODO return promise from Socket.prototype.connect which + // wraps _connectReq. 
+ + assert(self.connecting); + + let err; + + if (localAddress || localPort) { + if (addressType === 4) { + localAddress ||= DEFAULT_IPV4_ADDR; + err = self._handle.bind(localAddress, localPort); + } else { + // addressType === 6 + localAddress ||= DEFAULT_IPV6_ADDR; + err = self._handle.bind6(localAddress, localPort, flags); + } + debug( + "connect: binding to localAddress: %s and localPort: %d (addressType: %d)", + localAddress, + localPort, + addressType + ); + + err = checkBindError(err, localPort, self._handle); + if (err) { + const ex = new ExceptionWithHostPort( + err, + "bind", + localAddress, + localPort + ); + self.destroy(ex); + return; + } + } + + debug( + "connect: attempting to connect to %s:%d (addressType: %d)", + address, + port, + addressType + ); + self.emit("connectionAttempt", address, port, addressType); + + if (addressType === 6 || addressType === 4) { + if (self.blockList?.check(address, `ipv${addressType}`)) { + self.destroy(new ERR_IP_BLOCKED(address)); + return; + } + const req = new TCPConnectWrap(); + req.oncomplete = afterConnect; + req.address = address; + req.port = port; + req.localAddress = localAddress; + req.localPort = localPort; + req.addressType = addressType; + + if (addressType === 4) err = self._handle.connect(req, address, port); + else err = self._handle.connect6(req, address, port); + } else { + const req = new PipeConnectWrap(); + req.address = address; + req.oncomplete = afterConnect; + + err = self._handle.connect(req, address); + } + + if (err) { + const sockname = self._getsockname(); + let details; + + if (sockname) { + details = sockname.address + ":" + sockname.port; + } + + const ex = new ExceptionWithHostPort( + err, + "connect", + address, + port, + details + ); + self.destroy(ex); + } else if ((addressType === 6 || addressType === 4) && hasObserver("net")) { + startPerf(self, kPerfHooksNetConnectContext, { + type: "net", + name: "connect", + detail: { host: address, port }, + }); + } +} + +function 
internalConnectMultiple(context, canceled) { + clearTimeout(context[kTimeout]); + const self = context.socket; + + // We were requested to abort. Stop all operations + if (self._aborted) { + return; + } + + // All connections have been tried without success, destroy with error + if (canceled || context.current === context.addresses.length) { + if (context.errors.length === 0) { + self.destroy(new ERR_SOCKET_CONNECTION_TIMEOUT()); + return; + } + + self.destroy(new NodeAggregateError(context.errors)); + return; + } + + assert(self.connecting); + + const current = context.current++; + + if (current > 0) { + self[kReinitializeHandle](new TCP(TCPConstants.SOCKET)); + } + + const { localPort, port, flags } = context; + const { address, family: addressType } = context.addresses[current]; + let localAddress; + let err; + + if (localPort) { + if (addressType === 4) { + localAddress = DEFAULT_IPV4_ADDR; + err = self._handle.bind(localAddress, localPort); + } else { + // addressType === 6 + localAddress = DEFAULT_IPV6_ADDR; + err = self._handle.bind6(localAddress, localPort, flags); + } + + debug( + "connect/multiple: binding to localAddress: %s and localPort: %d (addressType: %d)", + localAddress, + localPort, + addressType + ); + + err = checkBindError(err, localPort, self._handle); + if (err) { + ArrayPrototypePush( + context.errors, + new ExceptionWithHostPort(err, "bind", localAddress, localPort) + ); + internalConnectMultiple(context); + return; + } + } + + if (self.blockList?.check(address, `ipv${addressType}`)) { + const ex = new ERR_IP_BLOCKED(address); + ArrayPrototypePush(context.errors, ex); + self.emit("connectionAttemptFailed", address, port, addressType, ex); + internalConnectMultiple(context); + return; + } + + debug( + "connect/multiple: attempting to connect to %s:%d (addressType: %d)", + address, + port, + addressType + ); + self.emit("connectionAttempt", address, port, addressType); + + const req = new TCPConnectWrap(); + req.oncomplete = 
FunctionPrototypeBind( + afterConnectMultiple, + undefined, + context, + current + ); + req.address = address; + req.port = port; + req.localAddress = localAddress; + req.localPort = localPort; + req.addressType = addressType; + + ArrayPrototypePush( + self.autoSelectFamilyAttemptedAddresses, + `${address}:${port}` + ); + + if (addressType === 4) { + err = self._handle.connect(req, address, port); + } else { + err = self._handle.connect6(req, address, port); + } + + if (err) { + const sockname = self._getsockname(); + let details; + + if (sockname) { + details = sockname.address + ":" + sockname.port; + } + + const ex = new ExceptionWithHostPort( + err, + "connect", + address, + port, + details + ); + ArrayPrototypePush(context.errors, ex); + + self.emit("connectionAttemptFailed", address, port, addressType, ex); + internalConnectMultiple(context); + return; + } + + if (current < context.addresses.length - 1) { + debug( + "connect/multiple: setting the attempt timeout to %d ms", + context.timeout + ); + + // If the attempt has not returned an error, start the connection timer + context[kTimeout] = setTimeout( + internalConnectMultipleTimeout, + context.timeout, + context, + req, + self._handle + ); + } +} + +Socket.prototype.connect = function (...args) { + let normalized; + // If passed an array, it's treated as an array of arguments that have + // already been normalized (so we don't normalize more than once). This has + // been solved before in https://github.com/nodejs/node/pull/12342, but was + // reverted as it had unintended side effects. 
+ if (ArrayIsArray(args[0]) && args[0][normalizedArgsSymbol]) { + normalized = args[0]; + } else { + normalized = normalizeArgs(args); + } + const options = normalized[0]; + const cb = normalized[1]; + + if (netClientSocketChannel.hasSubscribers) { + netClientSocketChannel.publish({ + socket: this, + }); + } + + if (cb !== null) { + this.once("connect", cb); + } + + // If the parent is already connecting, do not attempt to connect again + if (this._parent?.connecting) { + return this; + } + + // options.port === null will be checked later. + if (options.port === undefined && options.path == null) + throw new ERR_MISSING_ARGS(["options", "port", "path"]); + + if (this.write !== Socket.prototype.write) + this.write = Socket.prototype.write; + + if (this.destroyed) { + this._handle = null; + this._peername = null; + this._sockname = null; + } + + const { path } = options; + const pipe = !!path; + debug("pipe", pipe, path); + + if (!this._handle) { + this._handle = pipe + ? new Pipe(PipeConstants.SOCKET) + : new TCP(TCPConstants.SOCKET); + initSocketHandle(this); + } + + this._unrefTimer(); + + this.connecting = true; + + if (pipe) { + validateString(path, "options.path"); + defaultTriggerAsyncIdScope( + this[async_id_symbol], + internalConnect, + this, + path + ); + } else { + lookupAndConnect(this, options); + } + return this; +}; + +Socket.prototype[kReinitializeHandle] = function reinitializeHandle(handle) { + this._handle?.close(); + + this._handle = handle; + this._handle[owner_symbol] = this; + + initSocketHandle(this); +}; + +function socketToDnsFamily(family) { + switch (family) { + case "IPv4": + return 4; + case "IPv6": + return 6; + } + + return family; +} + +function lookupAndConnect(self, options) { + const { localAddress, localPort } = options; + const host = options.host || "localhost"; + let { port, autoSelectFamilyAttemptTimeout, autoSelectFamily } = options; + + validateString(host, "options.host"); + + if (localAddress && !isIP(localAddress)) { + 
throw new ERR_INVALID_IP_ADDRESS(localAddress); + } + + if (localPort) { + validateNumber(localPort, "options.localPort"); + } + + if (port !== undefined) { + if (typeof port !== "number" && typeof port !== "string") { + throw new ERR_INVALID_ARG_TYPE( + "options.port", + ["number", "string"], + port + ); + } + validatePort(port); + } + port |= 0; + + if (autoSelectFamily != null) { + validateBoolean(autoSelectFamily, "options.autoSelectFamily"); + } else { + autoSelectFamily = autoSelectFamilyDefault; + } + + if (autoSelectFamilyAttemptTimeout != null) { + validateInt32( + autoSelectFamilyAttemptTimeout, + "options.autoSelectFamilyAttemptTimeout", + 1 + ); + + if (autoSelectFamilyAttemptTimeout < 10) { + autoSelectFamilyAttemptTimeout = 10; + } + } else { + autoSelectFamilyAttemptTimeout = autoSelectFamilyAttemptTimeoutDefault; + } + + // If host is an IP, skip performing a lookup + const addressType = isIP(host); + if (addressType) { + defaultTriggerAsyncIdScope(self[async_id_symbol], process.nextTick, () => { + if (self.connecting) + defaultTriggerAsyncIdScope( + self[async_id_symbol], + internalConnect, + self, + host, + port, + addressType, + localAddress, + localPort + ); + }); + return; + } + + if (options.lookup != null) + validateFunction(options.lookup, "options.lookup"); + + if (dns === undefined) dns = require("dns"); + const dnsopts = { + family: socketToDnsFamily(options.family), + hints: options.hints || 0, + }; + + if ( + !isWindows && + dnsopts.family !== 4 && + dnsopts.family !== 6 && + dnsopts.hints === 0 + ) { + dnsopts.hints = dns.ADDRCONFIG; + } + + debug("connect: find host", host); + debug("connect: dns options", dnsopts); + self._host = host; + const lookup = options.lookup || dns.lookup; + + if ( + dnsopts.family !== 4 && + dnsopts.family !== 6 && + !localAddress && + autoSelectFamily + ) { + debug("connect: autodetecting"); + + dnsopts.all = true; + defaultTriggerAsyncIdScope(self[async_id_symbol], function () { + 
lookupAndConnectMultiple( + self, + async_id_symbol, + lookup, + host, + options, + dnsopts, + port, + localAddress, + localPort, + autoSelectFamilyAttemptTimeout + ); + }); + + return; + } + + defaultTriggerAsyncIdScope(self[async_id_symbol], function () { + lookup(host, dnsopts, function emitLookup(err, ip, addressType) { + self.emit("lookup", err, ip, addressType, host); + + // It's possible we were destroyed while looking this up. + // XXX it would be great if we could cancel the promise returned by + // the look up. + if (!self.connecting) return; + + if (err) { + // net.createConnection() creates a net.Socket object and immediately + // calls net.Socket.connect() on it (that's us). There are no event + // listeners registered yet so defer the error event to the next tick. + process.nextTick(connectErrorNT, self, err); + } else if (typeof ip !== "string" || !isIP(ip)) { + err = new ERR_INVALID_IP_ADDRESS(ip); + process.nextTick(connectErrorNT, self, err); + } else if (addressType !== 4 && addressType !== 6) { + err = new ERR_INVALID_ADDRESS_FAMILY( + addressType, + options.host, + options.port + ); + process.nextTick(connectErrorNT, self, err); + } else { + self._unrefTimer(); + defaultTriggerAsyncIdScope( + self[async_id_symbol], + internalConnect, + self, + ip, + port, + addressType, + localAddress, + localPort + ); + } + }); + }); +} + +function lookupAndConnectMultiple( + self, + async_id_symbol, + lookup, + host, + options, + dnsopts, + port, + localAddress, + localPort, + timeout +) { + defaultTriggerAsyncIdScope(self[async_id_symbol], function emitLookup() { + lookup(host, dnsopts, function emitLookup(err, addresses) { + // It's possible we were destroyed while looking this up. + // XXX it would be great if we could cancel the promise returned by + // the look up. 
      // We were destroyed while the DNS lookup was in flight; nothing to do.
      if (!self.connecting) {
        return;
      } else if (err) {
        self.emit("lookup", err, undefined, undefined, host);

        // net.createConnection() creates a net.Socket object and immediately
        // calls net.Socket.connect() on it (that's us). There are no event
        // listeners registered yet so defer the error event to the next tick.
        process.nextTick(connectErrorNT, self, err);
        return;
      }

      // Filter addresses by only keeping the one which are either IPv4 or IPV6.
      // The first valid address determines which group has preference on the
      // alternate family sorting which happens later.
      const validAddresses = [[], []];
      const validIps = [[], []];
      let destinations;
      for (let i = 0, l = addresses.length; i < l; i++) {
        const address = addresses[i];
        const { address: ip, family: addressType } = address;
        self.emit("lookup", err, ip, addressType, host);
        // It's possible we were destroyed while looking this up.
        if (!self.connecting) {
          return;
        }
        if (isIP(ip) && (addressType === 4 || addressType === 6)) {
          // Slot 0 holds the preferred family (the family of the first valid
          // address seen); slot 1 holds the other family.
          destinations ||= addressType === 6 ? { 6: 0, 4: 1 } : { 4: 0, 6: 1 };

          const destination = destinations[addressType];

          // Only try an address once
          if (!ArrayPrototypeIncludes(validIps[destination], ip)) {
            ArrayPrototypePush(validAddresses[destination], address);
            ArrayPrototypePush(validIps[destination], ip);
          }
        }
      }

      // When no AAAA or A records are available, fail on the first one
      if (!validAddresses[0].length && !validAddresses[1].length) {
        const { address: firstIp, family: firstAddressType } = addresses[0];

        if (!isIP(firstIp)) {
          err = new ERR_INVALID_IP_ADDRESS(firstIp);
          process.nextTick(connectErrorNT, self, err);
        } else if (firstAddressType !== 4 && firstAddressType !== 6) {
          err = new ERR_INVALID_ADDRESS_FAMILY(
            firstAddressType,
            options.host,
            options.port
          );
          process.nextTick(connectErrorNT, self, err);
        }

        return;
      }

      // Sort addresses alternating families
      const toAttempt = [];
      for (
        let i = 0,
          l = MathMax(validAddresses[0].length, validAddresses[1].length);
        i < l;
        i++
      ) {
        // `i in …` skips the shorter list's missing indices without pushing
        // undefined entries.
        if (i in validAddresses[0]) {
          ArrayPrototypePush(toAttempt, validAddresses[0][i]);
        }
        if (i in validAddresses[1]) {
          ArrayPrototypePush(toAttempt, validAddresses[1][i]);
        }
      }

      if (toAttempt.length === 1) {
        debug(
          "connect/multiple: only one address found, switching back to single connection"
        );
        const { address: ip, family: addressType } = toAttempt[0];

        self._unrefTimer();
        defaultTriggerAsyncIdScope(
          self[async_id_symbol],
          internalConnect,
          self,
          ip,
          port,
          addressType,
          localAddress,
          localPort
        );

        return;
      }

      self.autoSelectFamilyAttemptedAddresses = [];
      debug("connect/multiple: will try the following addresses", toAttempt);

      // Shared state threaded through the successive connection attempts
      // (consumed by internalConnectMultiple / afterConnectMultiple).
      const context = {
        socket: self,
        addresses: toAttempt,
        current: 0,
        port,
        localPort,
        timeout,
        [kTimeout]: null,
        errors: [],
      };

      self._unrefTimer();
      defaultTriggerAsyncIdScope(
        self[async_id_symbol],
        internalConnectMultiple,
        context
      );
    });
  });
}
// Deferred-destroy helper: invoked via process.nextTick() so connection
// errors are emitted after the caller has had a chance to attach listeners.
function connectErrorNT(self, err) {
  self.destroy(err);
}

// Re-reference the underlying handle so it keeps the event loop alive.
// If there is no handle yet, retry once the connection is established.
Socket.prototype.ref = function () {
  if (!this._handle) {
    this.once("connect", this.ref);
    return this;
  }

  if (typeof this._handle.ref === "function") {
    this._handle.ref();
  }

  return this;
};

// Un-reference the underlying handle so it no longer keeps the event loop
// alive. Mirrors Socket.prototype.ref above.
Socket.prototype.unref = function () {
  if (!this._handle) {
    this.once("connect", this.unref);
    return this;
  }

  if (typeof this._handle.unref === "function") {
    this._handle.unref();
  }

  return this;
};

// Completion callback for a connect request. `status` is a libuv status
// code (0 on success); `readable`/`writable` describe which directions the
// established stream supports.
function afterConnect(status, handle, req, readable, writable) {
  const self = handle[owner_symbol];

  // Callback may come after call to destroy
  if (self.destroyed) {
    return;
  }

  debug("afterConnect");

  assert(self.connecting);
  self.connecting = false;
  self._sockname = null;

  if (status === 0) {
    // Half-open handling: shut down whichever direction the handle lacks.
    if (self.readable && !readable) {
      self.push(null);
      self.read();
    }
    if (self.writable && !writable) {
      self.end();
    }
    self._unrefTimer();

    // Apply socket options that were recorded before the handle existed.
    if (self[kSetNoDelay] && self._handle.setNoDelay) {
      self._handle.setNoDelay(true);
    }

    if (self[kSetKeepAlive] && self._handle.setKeepAlive) {
      self._handle.setKeepAlive(true, self[kSetKeepAliveInitialDelay]);
    }

    self.emit("connect");
    self.emit("ready");

    // Start the first read, or get an immediate EOF.
    // this doesn't actually consume any bytes, because len=0.
    if (readable && !self.isPaused()) self.read(0);
    if (self[kPerfHooksNetConnectContext] && hasObserver("net")) {
      stopPerf(self, kPerfHooksNetConnectContext);
    }
  } else {
    // Connection failed: build an exception carrying host/port (and local
    // address) details, then destroy the socket with it.
    let details;
    if (req.localAddress && req.localPort) {
      details = req.localAddress + ":" + req.localPort;
    }
    const ex = new ExceptionWithHostPort(
      status,
      "connect",
      req.address,
      req.port,
      details
    );
    if (details) {
      ex.localAddress = req.localAddress;
      ex.localPort = req.localPort;
    }

    self.emit(
      "connectionAttemptFailed",
      req.address,
      req.port,
      req.addressType,
      ex
    );
    self.destroy(ex);
  }
}

// Wire an AbortSignal to a client socket: when the signal fires, mark the
// socket as aborted and dispose of the listener.
// NOTE(review): unlike addServerAbortSignalOption below, this does not
// tolerate a missing options.signal — presumably callers only invoke it
// when a signal was passed; confirm at call sites.
function addClientAbortSignalOption(self, options) {
  validateAbortSignal(options.signal, "options.signal");
  const { signal } = options;
  let disposable;

  function onAbort() {
    disposable?.[SymbolDispose]();
    self._aborted = true;
  }

  if (signal.aborted) {
    process.nextTick(onAbort);
  } else {
    // Install the listener on the next tick, matching the deferred abort
    // path above.
    process.nextTick(() => {
      disposable = addAbortListener(signal, onAbort);
    });
  }
}

// Build the exception recorded when a single connection attempt fails,
// annotated with the local address/port when available.
function createConnectionError(req, status) {
  let details;

  if (req.localAddress && req.localPort) {
    details = req.localAddress + ":" + req.localPort;
  }

  const ex = new ExceptionWithHostPort(
    status,
    "connect",
    req.address,
    req.port,
    details
  );
  if (details) {
    ex.localAddress = req.localAddress;
    ex.localPort = req.localPort;
  }

  return ex;
}

// Completion callback for one attempt of the autoSelectFamily connection
// sequence. `current` identifies which attempt this callback belongs to;
// `context` is the shared state built in lookupAndConnectMultiple.
function afterConnectMultiple(
  context,
  current,
  status,
  handle,
  req,
  readable,
  writable
) {
  debug(
    "connect/multiple: connection attempt to %s:%s completed with status %s",
    req.address,
    req.port,
    status
  );

  // Make sure another connection is not spawned
  clearTimeout(context[kTimeout]);

  // One of the connection has completed and correctly dispatched but after timeout, ignore this one
  if (status === 0 && current !== context.current - 1) {
    debug(
      "connect/multiple: ignoring successful but timedout connection to %s:%s",
      req.address,
      req.port
    );
    handle.close();
    return;
  }

  const self = context.socket;

  // Some error occurred, add to the list of exceptions
  if (status !== 0) {
    const ex = createConnectionError(req, status);
    ArrayPrototypePush(context.errors, ex);

    self.emit(
      "connectionAttemptFailed",
      req.address,
      req.port,
      req.addressType,
      ex
    );

    // Try the next address, unless we were aborted
    if (context.socket.connecting) {
      internalConnectMultiple(context, status === UV_ECANCELED);
    }

    return;
  }

  if (hasObserver("net")) {
    startPerf(self, kPerfHooksNetConnectContext, {
      type: "net",
      name: "connect",
      detail: { host: req.address, port: req.port },
    });
  }

  // Success: hand off to the single-connection completion logic.
  afterConnect(status, self._handle, req, readable, writable);
}

// Timer callback fired when one attempt of the autoSelectFamily sequence
// exceeded its per-attempt timeout: record a UV_ETIMEDOUT error, close the
// attempt's handle, and move on to the next candidate address.
function internalConnectMultipleTimeout(context, req, handle) {
  debug(
    "connect/multiple: connection to %s:%s timed out",
    req.address,
    req.port
  );
  context.socket.emit(
    "connectionAttemptTimeout",
    req.address,
    req.port,
    req.addressType
  );

  // Detach the completion callback so a late success cannot fire.
  req.oncomplete = undefined;
  ArrayPrototypePush(context.errors, createConnectionError(req, UV_ETIMEDOUT));
  handle.close();

  // Try the next address, unless we were aborted
  if (context.socket.connecting) {
    internalConnectMultiple(context);
  }
}

// Wire an optional AbortSignal to a server: aborting closes the server.
// The abort listener is disposed once the server emits 'close'.
function addServerAbortSignalOption(self, options) {
  if (options?.signal === undefined) {
    return;
  }
  validateAbortSignal(options.signal, "options.signal");
  const { signal } = options;
  const onAborted = () => {
    self.close();
  };
  if (signal.aborted) {
    process.nextTick(onAborted);
  } else {
    const disposable = addAbortListener(signal, onAborted);
    self.once("close", disposable[SymbolDispose]);
  }
}

// net.Server constructor. Accepts (options[, connectionListener]) or just
// (connectionListener); works with or without `new`.
function Server(options, connectionListener) {
  if (!(this instanceof Server)) return new Server(options, connectionListener);

  EventEmitter.call(this);

  if (typeof options === "function") {
    connectionListener = options;
    options = kEmptyObject;
    this.on("connection", connectionListener);
  }
  else if (options == null || typeof options === "object") {
    // Copy so later normalization does not mutate the caller's object.
    options = { ...options };

    if (typeof connectionListener === "function") {
      this.on("connection", connectionListener);
    }
  } else {
    throw new ERR_INVALID_ARG_TYPE("options", "Object", options);
  }
  if (options.keepAliveInitialDelay !== undefined) {
    validateNumber(
      options.keepAliveInitialDelay,
      "options.keepAliveInitialDelay"
    );

    // Negative delays are clamped to 0 rather than rejected.
    if (options.keepAliveInitialDelay < 0) {
      options.keepAliveInitialDelay = 0;
    }
  }
  if (options.highWaterMark !== undefined) {
    validateNumber(options.highWaterMark, "options.highWaterMark");

    // A negative high-water mark falls back to the stream default.
    if (options.highWaterMark < 0) {
      options.highWaterMark = getDefaultHighWaterMark();
    }
  }

  this._connections = 0;

  this[async_id_symbol] = -1;
  this._handle = null;
  this._usingWorkers = false;
  this._workers = [];
  this._unref = false;
  // Bumped on every listen()/close() to invalidate in-flight callbacks.
  this._listeningId = 1;

  this.allowHalfOpen = options.allowHalfOpen || false;
  this.pauseOnConnect = !!options.pauseOnConnect;
  this.noDelay = Boolean(options.noDelay);
  this.keepAlive = Boolean(options.keepAlive);
  // Stored in whole seconds (the option is taken in milliseconds).
  this.keepAliveInitialDelay = ~~(options.keepAliveInitialDelay / 1000);
  this.highWaterMark = options.highWaterMark ?? getDefaultHighWaterMark();
  if (options.blockList) {
    if (!module.exports.BlockList.isBlockList(options.blockList)) {
      throw new ERR_INVALID_ARG_TYPE(
        "options.blockList",
        "net.BlockList",
        options.blockList
      );
    }
    this.blockList = options.blockList;
  }
}
ObjectSetPrototypeOf(Server.prototype, EventEmitter.prototype);
ObjectSetPrototypeOf(Server, EventEmitter);

// Coerce x to a non-negative number, or return false when it is not one.
function toNumber(x) {
  return (x = Number(x)) >= 0 ?
    x : false;
}

// Returns handle if it can be created, or error code if it can't
function createServerHandle(address, port, addressType, fd, flags) {
  let err = 0;
  // Assign handle in listen, and clean up if bind or listen fails
  let handle;

  let isTCP = false;
  if (typeof fd === "number" && fd >= 0) {
    // Listening on an existing file descriptor.
    try {
      handle = createHandle(fd, true);
    } catch (e) {
      // Not a fd we can listen on. This will trigger an error.
      debug("listen invalid fd=%d:", fd, e.message);
      return UV_EINVAL;
    }

    err = handle.open(fd);
    if (err) return err;

    assert(!address && !port);
  } else if (port === -1 && addressType === -1) {
    // Pipe server (UNIX domain socket / Windows named pipe).
    handle = new Pipe(PipeConstants.SERVER);
    if (isWindows) {
      const instances = NumberParseInt(process.env.NODE_PENDING_PIPE_INSTANCES);
      if (!NumberIsNaN(instances)) {
        handle.setPendingInstances(instances);
      }
    }
  } else {
    handle = new TCP(TCPConstants.SERVER);
    isTCP = true;
  }

  if (address || port || isTCP) {
    debug("bind to", address || "any");
    if (!address) {
      // Try binding to ipv6 first
      err = handle.bind6(DEFAULT_IPV6_ADDR, port, flags);
      if (err) {
        handle.close();
        // Fallback to ipv4
        return createServerHandle(
          DEFAULT_IPV4_ADDR,
          port,
          undefined,
          undefined,
          flags
        );
      }
    } else if (addressType === 6) {
      err = handle.bind6(address, port, flags);
    } else {
      err = handle.bind(address, port, flags);
    }
  }

  if (err) {
    handle.close();
    return err;
  }

  return handle;
}

// Create (or adopt) the listening handle and start listening. Also exposed
// as Server.prototype._listen2 and reused by the cluster path.
function setupListenHandle(address, port, addressType, backlog, fd, flags) {
  debug("setupListenHandle", address, port, addressType, backlog, fd);

  // If there is not yet a handle, we need to create one and bind.
  // In the case of a server sent via IPC, we don't need to do this.
  if (this._handle) {
    debug("setupListenHandle: have a handle already");
  } else {
    debug("setupListenHandle: create a handle");

    let rval = null;

    // Try to bind to the unspecified IPv6 address, see if IPv6 is available
    if (!address && typeof fd !== "number") {
      rval = createServerHandle(DEFAULT_IPV6_ADDR, port, 6, fd, flags);

      if (typeof rval === "number") {
        // A numeric rval is an error code: IPv6 unavailable, retry as IPv4.
        rval = null;
        address = DEFAULT_IPV4_ADDR;
        addressType = 4;
      } else {
        address = DEFAULT_IPV6_ADDR;
        addressType = 6;
      }
    }

    if (rval === null)
      rval = createServerHandle(address, port, addressType, fd, flags);

    if (typeof rval === "number") {
      const error = new UVExceptionWithHostPort(rval, "listen", address, port);

      // Publish to the diagnostics channel before emitting the error.
      if (netServerListen.hasSubscribers) {
        netServerListen.error.publish({ server: this, error });
      }

      // Defer so callers can attach an 'error' listener first.
      process.nextTick(emitErrorNT, this, error);
      return;
    }
    this._handle = rval;
  }

  this[async_id_symbol] = getNewAsyncId(this._handle);
  this._handle.onconnection = onconnection;
  this._handle[owner_symbol] = this;

  // Use a backlog of 512 entries. We pass 511 to the listen() call because
  // the kernel does: backlogsize = roundup_pow_of_two(backlogsize + 1);
  // which will thus give us a backlog of 512 entries.
  const err = this._handle.listen(backlog || 511);

  if (err) {
    const ex = new UVExceptionWithHostPort(err, "listen", address, port);
    this._handle.close();
    this._handle = null;

    if (netServerListen.hasSubscribers) {
      netServerListen.error.publish({ server: this, error: ex });
    }

    // Defer so callers get a chance to attach an 'error' listener first.
    defaultTriggerAsyncIdScope(
      this[async_id_symbol],
      process.nextTick,
      emitErrorNT,
      this,
      ex
    );
    return;
  }

  if (netServerListen.hasSubscribers) {
    netServerListen.asyncEnd.publish({ server: this });
  }

  // Generate connection key, this should be unique to the connection
  this._connectionKey = addressType + ":" + address + ":" + port;

  // Unref the handle if the server was unref'ed prior to listening
  if (this._unref) this.unref();

  defaultTriggerAsyncIdScope(
    this[async_id_symbol],
    process.nextTick,
    emitListeningNT,
    this
  );
}

Server.prototype._listen2 = setupListenHandle; // legacy alias

// Deferred 'error' emitter (used via process.nextTick).
function emitErrorNT(self, err) {
  self.emit("error", err);
}

// Deferred 'listening' emitter (used via process.nextTick).
function emitListeningNT(self) {
  // Ensure handle hasn't closed
  if (self._handle) self.emit("listening");
}

// Start listening, accounting for cluster workers: the primary process (or
// an exclusive listener) binds directly; a worker requests a shared handle
// from the primary via cluster._getServer().
function listenInCluster(
  server,
  address,
  port,
  addressType,
  backlog,
  fd,
  exclusive,
  flags,
  options
) {
  exclusive = !!exclusive;

  if (cluster === undefined) cluster = require("cluster");

  if (cluster.isPrimary || exclusive) {
    // Will create a new handle
    // _listen2 sets up the listened handle, it is still named like this
    // to avoid breaking code that wraps this method
    server._listen2(address, port, addressType, backlog, fd, flags);
    return;
  }

  const serverQuery = {
    address: address,
    port: port,
    addressType: addressType,
    fd: fd,
    flags,
    backlog,
    ...options,
  };
  // Snapshot the id so a listen()/close() issued meanwhile invalidates us.
  const listeningId = server._listeningId;
  // Get the primary's server handle, and listen on it
  cluster._getServer(server, serverQuery, listenOnPrimaryHandle);
  function listenOnPrimaryHandle(err, handle) {
    if (listeningId !== server._listeningId)
    {
      // The server was closed or re-listened while waiting on the primary.
      handle.close();
      return;
    }
    err = checkBindError(err, port, handle);

    if (err) {
      const ex = new ExceptionWithHostPort(err, "bind", address, port);
      return server.emit("error", ex);
    }
    // If there was a handle, just close it to avoid fd leak
    // but it doesn't look like that's going to happen right now
    if (server._handle) {
      server._handle.close();
    }
    // Reuse primary's server handle
    server._handle = handle;
    // _listen2 sets up the listened handle, it is still named like this
    // to avoid breaking code that wraps this method
    server._listen2(address, port, addressType, backlog, fd, flags);
  }
}

// Server.listen(...) — supports the (port[, host][, backlog][, cb]),
// (path, ...), (handle, ...) and (options[, cb]) call forms.
Server.prototype.listen = function (...args) {
  const normalized = normalizeArgs(args);
  let options = normalized[0];
  const cb = normalized[1];

  if (this._handle) {
    throw new ERR_SERVER_ALREADY_LISTEN();
  }

  if (netServerListen.hasSubscribers) {
    netServerListen.asyncStart.publish({ server: this, options });
  }

  if (cb !== null) {
    this.once("listening", cb);
  }
  const backlogFromArgs =
    // (handle, backlog) or (path, backlog) or (port, backlog)
    toNumber(args.length > 1 && args[1]) ||
    toNumber(args.length > 2 && args[2]); // (port, host, backlog)

  options = options._handle || options.handle || options;
  const flags = getFlags(options);
  // Refresh the id to make the previous call invalid
  this._listeningId++;
  // (handle[, backlog][, cb]) where handle is an object with a handle
  if (options instanceof TCP) {
    this._handle = options;
    this[async_id_symbol] = this._handle.getAsyncId();
    listenInCluster(this, null, -1, -1, backlogFromArgs, undefined, true);
    return this;
  }
  addServerAbortSignalOption(this, options);
  // (handle[, backlog][, cb]) where handle is an object with a fd
  if (typeof options.fd === "number" && options.fd >= 0) {
    listenInCluster(this, null, null, null, backlogFromArgs, options.fd);
    return this;
  }

  // ([port][, host][, backlog][, cb]) where port is omitted,
  // that
  // is, listen(), listen(null), listen(cb), or listen(null, cb)
  // or (options[, cb]) where options.port is explicitly set as undefined or
  // null, bind to an arbitrary unused port
  if (
    args.length === 0 ||
    typeof args[0] === "function" ||
    (options.port === undefined && "port" in options) ||
    options.port === null
  ) {
    options.port = 0;
  }
  // ([port][, host][, backlog][, cb]) where port is specified
  // or (options[, cb]) where options.port is specified
  // or if options.port is normalized as 0 before
  let backlog;
  if (typeof options.port === "number" || typeof options.port === "string") {
    validatePort(options.port, "options.port");
    backlog = options.backlog || backlogFromArgs;
    // reusePort implies an exclusive (non-cluster-shared) handle.
    if (options.reusePort === true) {
      options.exclusive = true;
    }
    // start TCP server listening on host:port
    if (options.host) {
      lookupAndListen(
        this,
        options.port | 0,
        options.host,
        backlog,
        options.exclusive,
        flags
      );
    } else {
      // Undefined host, listens on unspecified address
      // Default addressType 4 will be used to search for primary server
      listenInCluster(
        this,
        null,
        options.port | 0,
        4,
        backlog,
        undefined,
        options.exclusive,
        flags
      );
    }
    return this;
  }

  // (path[, backlog][, cb]) or (options[, cb])
  // where path or options.path is a UNIX domain socket or Windows pipe
  if (options.path && isPipeName(options.path)) {
    // We can not call fchmod on abstract unix socket
    if (
      options.path[0] === "\0" &&
      (options.readableAll || options.writableAll)
    ) {
      // NOTE(review): "writableAllt" typo below is in the shipped error
      // message string; left untouched to preserve runtime behavior.
      const msg =
        "can not set readableAll or writableAllt to true when path is abstract unix socket";
      throw new ERR_INVALID_ARG_VALUE("options", options, msg);
    }
    const pipeName = (this._pipeName = options.path);
    backlog = options.backlog || backlogFromArgs;
    listenInCluster(
      this,
      pipeName,
      -1,
      -1,
      backlog,
      undefined,
      options.exclusive,
      undefined,
      {
        readableAll: options.readableAll,
        writableAll: options.writableAll,
      }
    );

    if (!this._handle) {
      // Failed and an error shall be emitted in the next tick.
      // Therefore, we directly return.
      return this;
    }

    // Apply requested world-readable/writable permission bits to the pipe.
    let mode = 0;
    if (options.readableAll === true) mode |= PipeConstants.UV_READABLE;
    if (options.writableAll === true) mode |= PipeConstants.UV_WRITABLE;
    if (mode !== 0) {
      const err = this._handle.fchmod(mode);
      if (err) {
        this._handle.close();
        this._handle = null;
        throw new ErrnoException(err, "uv_pipe_chmod");
      }
    }
    return this;
  }

  if (!("port" in options || "path" in options)) {
    throw new ERR_INVALID_ARG_VALUE(
      "options",
      options,
      'must have the property "port" or "path"'
    );
  }

  throw new ERR_INVALID_ARG_VALUE("options", options);
};

// True when `ip` is an IPv6 link-local address (fe80::/10).
function isIpv6LinkLocal(ip) {
  if (!isIPv6(ip)) {
    return false;
  }

  const ipv6Buffer = convertIpv6StringToBuffer(ip);
  const firstByte = ipv6Buffer[0]; // The first 8 bits
  const secondByte = ipv6Buffer[1]; // The next 8 bits

  // The link-local prefix is `1111111010`, which in hexadecimal is `fe80`
  // First 8 bits (firstByte) should be `11111110` (0xfe)
  // The next 2 bits of the second byte should be `10` (0x80)

  const isFirstByteCorrect = firstByte === 0xfe; // 0b11111110 == 0xfe
  const isSecondByteCorrect = (secondByte & 0xc0) === 0x80; // 0b10xxxxxx == 0x80

  return isFirstByteCorrect && isSecondByteCorrect;
}

// Pick the address to listen on from a dns.lookup({ all: true }) result,
// preferring anything over an IPv6 link-local address.
function filterOnlyValidAddress(addresses) {
  // Return the first non IPV6 link-local address if present
  for (const address of addresses) {
    if (!isIpv6LinkLocal(address.address)) {
      return address;
    }
  }

  // Otherwise return the first address
  return addresses[0];
}

// Resolve `address` via DNS, then listen on the resolved address. A
// listen()/close() issued while the lookup is in flight cancels this call
// (tracked via _listeningId).
function lookupAndListen(self, port, address, backlog, exclusive, flags) {
  if (dns === undefined) dns = require("dns");
  const listeningId = self._listeningId;

  dns.lookup(address, { all: true }, (err, addresses) => {
    if (listeningId !== self._listeningId) {
      return;
    }
    if (err) {
      self.emit("error", err);
    } else {
      const
        validAddress = filterOnlyValidAddress(addresses);
      const family = validAddress?.family || 4;

      listenInCluster(
        self,
        validAddress.address,
        port,
        family,
        backlog,
        undefined,
        exclusive,
        flags
      );
    }
  });
}

// `listening` reflects whether the server currently has a bound handle.
ObjectDefineProperty(Server.prototype, "listening", {
  __proto__: null,
  get: function () {
    return !!this._handle;
  },
  configurable: true,
  enumerable: true,
});

// Returns the bound address info from getsockname() when listening on a
// TCP handle, the pipe name for pipe servers, or null when not listening.
Server.prototype.address = function () {
  if (this._handle?.getsockname) {
    const out = {};
    const err = this._handle.getsockname(out);
    if (err) {
      throw new ErrnoException(err, "address");
    }
    return out;
  } else if (this._pipeName) {
    return this._pipeName;
  }
  return null;
};

// Invoked from native code for every incoming connection on the listening
// handle (`this` is the handle; its owner is the Server).
function onconnection(err, clientHandle) {
  const handle = this;
  const self = handle[owner_symbol];

  debug("onconnection");

  if (err) {
    self.emit("error", new ErrnoException(err, "accept"));
    return;
  }

  // Over the maxConnections limit: emit 'drop' (with local/peer details
  // when the handle can report them) and reject the connection.
  if (self.maxConnections != null && self._connections >= self.maxConnections) {
    if (clientHandle.getsockname || clientHandle.getpeername) {
      const data = { __proto__: null };
      if (clientHandle.getsockname) {
        const localInfo = { __proto__: null };
        clientHandle.getsockname(localInfo);
        data.localAddress = localInfo.address;
        data.localPort = localInfo.port;
        data.localFamily = localInfo.family;
      }
      if (clientHandle.getpeername) {
        const remoteInfo = { __proto__: null };
        clientHandle.getpeername(remoteInfo);
        data.remoteAddress = remoteInfo.address;
        data.remotePort = remoteInfo.port;
        data.remoteFamily = remoteInfo.family;
      }
      self.emit("drop", data);
    } else {
      self.emit("drop");
    }
    clientHandle.close();
    return;
  }
  // Silently reject peers matched by the configured BlockList.
  if (self.blockList && typeof clientHandle.getpeername === "function") {
    const remoteInfo = { __proto__: null };
    clientHandle.getpeername(remoteInfo);
    const addressType = isIP(remoteInfo.address);
    if (
      addressType &&
      self.blockList.check(remoteInfo.address, `ipv${addressType}`)
    ) {
      clientHandle.close();
      return;
    }
  }
  // Wrap the accepted handle in a net.Socket and hand it to userland.
  const socket = new Socket({
    handle: clientHandle,
    allowHalfOpen: self.allowHalfOpen,
    pauseOnCreate: self.pauseOnConnect,
    readable: true,
    writable: true,
    readableHighWaterMark: self.highWaterMark,
    writableHighWaterMark: self.highWaterMark,
  });

  // Propagate server-level socket options onto the accepted connection.
  if (self.noDelay && clientHandle.setNoDelay) {
    socket[kSetNoDelay] = true;
    clientHandle.setNoDelay(true);
  }
  if (self.keepAlive && clientHandle.setKeepAlive) {
    socket[kSetKeepAlive] = true;
    socket[kSetKeepAliveInitialDelay] = self.keepAliveInitialDelay;
    clientHandle.setKeepAlive(true, self.keepAliveInitialDelay);
  }

  self._connections++;
  socket.server = self;
  socket._server = self;
  self.emit("connection", socket);
  if (netServerSocketChannel.hasSubscribers) {
    netServerSocketChannel.publish({
      socket,
    });
  }
}

/**
 * Gets the number of concurrent connections on the server
 * @param {Function} cb
 * @returns {Server}
 */

Server.prototype.getConnections = function (cb) {
  const self = this;

  // Deliver the result asynchronously within this server's async scope.
  function end(err, connections) {
    defaultTriggerAsyncIdScope(
      self[async_id_symbol],
      process.nextTick,
      cb,
      err,
      connections
    );
  }

  if (!this._usingWorkers) {
    end(null, this._connections);
    return this;
  }

  // Poll workers
  let left = this._workers.length;
  let total = this._connections;

  function oncount(err, count) {
    if (err) {
      // Poison `left` so late replies cannot trigger end() a second time.
      left = -1;
      return end(err);
    }

    total += count;
    if (--left === 0) return end(null, total);
  }

  for (let n = 0; n < this._workers.length; n++) {
    this._workers[n].getConnections(oncount);
  }

  return this;
};

// Stop accepting new connections; 'close' fires once existing connections
// drain. When the server is not listening, cb receives
// ERR_SERVER_NOT_RUNNING via the deferred 'close' event.
Server.prototype.close = function (cb) {
  this._listeningId++;
  if (typeof cb === "function") {
    if (!this._handle) {
      this.once("close", function close() {
        cb(new ERR_SERVER_NOT_RUNNING());
      });
    } else {
      this.once("close", cb);
    }
  }

  if (this._handle) {
    this._handle.close();
    this._handle = null;
  }

  if (this._usingWorkers) {
    let left =
      this._workers.length;
    const onWorkerClose = () => {
      if (--left !== 0) return;

      this._connections = 0;
      this._emitCloseIfDrained();
    };

    // Increment connections to be sure that, even if all sockets will be closed
    // during polling of workers, `close` event will be emitted only once.
    this._connections++;

    // Poll workers
    for (let n = 0; n < this._workers.length; n++)
      this._workers[n].close(onWorkerClose);
  } else {
    this._emitCloseIfDrained();
  }

  return this;
};

// Explicit-resource-management support (`await using`): disposing the
// server closes it via the promisified close().
Server.prototype[SymbolAsyncDispose] = async function () {
  if (!this._handle) {
    return;
  }
  await FunctionPrototypeCall(promisify(this.close), this);
};

// Emit 'close' (on the next tick) once the handle is gone and no
// connections remain; otherwise do nothing.
Server.prototype._emitCloseIfDrained = function () {
  debug("SERVER _emitCloseIfDrained");

  if (this._handle || this._connections) {
    debug(
      "SERVER handle? %j connections? %d",
      !!this._handle,
      this._connections
    );
    return;
  }

  defaultTriggerAsyncIdScope(
    this[async_id_symbol],
    process.nextTick,
    emitCloseNT,
    this
  );
};

// Deferred 'close' emitter (used via process.nextTick).
function emitCloseNT(self) {
  debug("SERVER: emit close");
  self.emit("close");
}

// Route rejected promises from 'connection' listeners to the offending
// socket; any other event's rejection becomes a server 'error'.
Server.prototype[EventEmitter.captureRejectionSymbol] = function (
  err,
  event,
  sock
) {
  switch (event) {
    case "connection":
      sock.destroy(err);
      break;
    default:
      this.emit("error", err);
  }
};

// Legacy alias on the C++ wrapper object. This is not public API, so we may
// want to runtime-deprecate it at some point. There's no hurry, though.
// Legacy `owner` accessor on the TCP wrapper, forwarding to owner_symbol.
ObjectDefineProperty(TCP.prototype, "owner", {
  __proto__: null,
  get() {
    return this[owner_symbol];
  },
  set(v) {
    return (this[owner_symbol] = v);
  },
});

// The handle is stored under kHandle; this property preserves the legacy
// `_handle` name used throughout this file (and by external monkey-patchers).
ObjectDefineProperty(Socket.prototype, "_handle", {
  __proto__: null,
  get() {
    return this[kHandle];
  },
  set(v) {
    return (this[kHandle] = v);
  },
});

// Register a cluster worker's socket list so connection counting and
// close() include it; deregister when the worker exits.
Server.prototype._setupWorker = function (socketList) {
  this._usingWorkers = true;
  this._workers.push(socketList);
  socketList.once("exit", (socketList) => {
    const index = ArrayPrototypeIndexOf(this._workers, socketList);
    this._workers.splice(index, 1);
  });
};

// Keep the event loop alive for this server (applied immediately when a
// handle exists, otherwise remembered for setupListenHandle).
Server.prototype.ref = function () {
  this._unref = false;

  if (this._handle) this._handle.ref();

  return this;
};

// Counterpart of Server.prototype.ref above.
Server.prototype.unref = function () {
  this._unref = true;

  if (this._handle) this._handle.unref();

  return this;
};

module.exports = {
  _createServerHandle: createServerHandle,
  _normalizeArgs: normalizeArgs,
  get BlockList() {
    BlockList ??= require("internal/blocklist").BlockList;
    return BlockList;
  },
  get SocketAddress() {
    SocketAddress ??= require("internal/socketaddress").SocketAddress;
    return SocketAddress;
  },
  connect,
  createConnection: connect,
  createServer,
  isIP: isIP,
  isIPv4: isIPv4,
  isIPv6: isIPv6,
  Server,
  Socket,
  Stream: Socket, // Legacy naming
  getDefaultAutoSelectFamily,
  setDefaultAutoSelectFamily,
  getDefaultAutoSelectFamilyAttemptTimeout,
  setDefaultAutoSelectFamilyAttemptTimeout,
};
diff --git a/node/node_sea.cc b/node/node_sea.cc
new file mode 100644
index 00000000..a1184d47
--- /dev/null
+++ b/node/node_sea.cc
@@ -0,0 +1,895 @@
#include "node_sea.h"

#include "blob_serializer_deserializer-inl.h"
#include "debug_utils-inl.h"
#include "env-inl.h"
#include "node_contextify.h"
#include "node_errors.h"
#include "node_external_reference.h"
#include "node_internals.h"
#include "node_options.h"
#include "node_snapshot_builder.h"
#include "node_union_bytes.h"
#include
"node_v8_platform-inl.h" +#include "simdjson.h" +#include "util-inl.h" + +// The POSTJECT_SENTINEL_FUSE macro is a string of random characters selected by +// the Node.js project that is present only once in the entire binary. It is +// used by the postject_has_resource() function to efficiently detect if a +// resource has been injected. See +// https://github.com/nodejs/postject/blob/35343439cac8c488f2596d7c4c1dddfec1fddcae/postject-api.h#L42-L45. +#define POSTJECT_SENTINEL_FUSE "NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2" +#include "postject-api.h" +#undef POSTJECT_SENTINEL_FUSE + +#include +#include +#include +#include + +using node::ExitCode; +using v8::Array; +using v8::ArrayBuffer; +using v8::BackingStore; +using v8::Context; +using v8::Function; +using v8::FunctionCallbackInfo; +using v8::HandleScope; +using v8::Isolate; +using v8::Local; +using v8::LocalVector; +using v8::MaybeLocal; +using v8::NewStringType; +using v8::Object; +using v8::ScriptCompiler; +using v8::ScriptOrigin; +using v8::String; +using v8::Value; + +namespace node { +namespace sea { + +namespace { + +SeaFlags operator|(SeaFlags x, SeaFlags y) { + return static_cast(static_cast(x) | + static_cast(y)); +} + +SeaFlags operator&(SeaFlags x, SeaFlags y) { + return static_cast(static_cast(x) & + static_cast(y)); +} + +SeaFlags operator|=(/* NOLINT (runtime/references) */ SeaFlags& x, SeaFlags y) { + return x = x | y; +} + +class SeaSerializer : public BlobSerializer { + public: + SeaSerializer() + : BlobSerializer( + per_process::enabled_debug_list.enabled(DebugCategory::SEA)) {} + + template ::value>* = nullptr, + std::enable_if_t::value>* = nullptr> + size_t Write(const T& data); +}; + +template <> +size_t SeaSerializer::Write(const SeaResource& sea) { + sink.reserve(SeaResource::kHeaderSize + sea.main_code_or_snapshot.size()); + + Debug("Write SEA magic %x\n", kMagic); + size_t written_total = WriteArithmetic(kMagic); + + uint32_t flags = static_cast(sea.flags); + Debug("Write SEA flags 
%x\n", flags); + written_total += WriteArithmetic(flags); + + Debug("Write SEA resource exec argv extension %u\n", + static_cast(sea.exec_argv_extension)); + written_total += + WriteArithmetic(static_cast(sea.exec_argv_extension)); + DCHECK_EQ(written_total, SeaResource::kHeaderSize); + + Debug("Write SEA code path %p, size=%zu\n", + sea.code_path.data(), + sea.code_path.size()); + written_total += + WriteStringView(sea.code_path, StringLogMode::kAddressAndContent); + + Debug("Write SEA resource %s %p, size=%zu\n", + sea.use_snapshot() ? "snapshot" : "code", + sea.main_code_or_snapshot.data(), + sea.main_code_or_snapshot.size()); + written_total += + WriteStringView(sea.main_code_or_snapshot, + sea.use_snapshot() ? StringLogMode::kAddressOnly + : StringLogMode::kAddressAndContent); + + if (sea.code_cache.has_value()) { + Debug("Write SEA resource code cache %p, size=%zu\n", + sea.code_cache->data(), + sea.code_cache->size()); + written_total += + WriteStringView(sea.code_cache.value(), StringLogMode::kAddressOnly); + } + + if (!sea.assets.empty()) { + Debug("Write SEA resource assets size %zu\n", sea.assets.size()); + written_total += WriteArithmetic(sea.assets.size()); + for (auto const& [key, content] : sea.assets) { + Debug("Write SEA resource asset %s at %p, size=%zu\n", + key, + content.data(), + content.size()); + written_total += WriteStringView(key, StringLogMode::kAddressAndContent); + written_total += WriteStringView(content, StringLogMode::kAddressOnly); + } + } + + if (static_cast(sea.flags & SeaFlags::kIncludeExecArgv)) { + Debug("Write SEA resource exec argv size %zu\n", sea.exec_argv.size()); + written_total += WriteArithmetic(sea.exec_argv.size()); + for (const auto& arg : sea.exec_argv) { + Debug("Write SEA resource exec arg %s at %p, size=%zu\n", + arg.data(), + arg.data(), + arg.size()); + written_total += WriteStringView(arg, StringLogMode::kAddressAndContent); + } + } + return written_total; +} + +class SeaDeserializer : public BlobDeserializer 
{ + public: + explicit SeaDeserializer(std::string_view v) + : BlobDeserializer( + per_process::enabled_debug_list.enabled(DebugCategory::SEA), v) {} + + template ::value>* = nullptr, + std::enable_if_t::value>* = nullptr> + T Read(); +}; + +template <> +SeaResource SeaDeserializer::Read() { + uint32_t magic = ReadArithmetic(); + Debug("Read SEA magic %x\n", magic); + + CHECK_EQ(magic, kMagic); + SeaFlags flags(static_cast(ReadArithmetic())); + Debug("Read SEA flags %x\n", static_cast(flags)); + + uint8_t extension_value = ReadArithmetic(); + SeaExecArgvExtension exec_argv_extension = + static_cast(extension_value); + Debug("Read SEA resource exec argv extension %u\n", extension_value); + CHECK_EQ(read_total, SeaResource::kHeaderSize); + + std::string_view code_path = + ReadStringView(StringLogMode::kAddressAndContent); + Debug( + "Read SEA code path %p, size=%zu\n", code_path.data(), code_path.size()); + + bool use_snapshot = static_cast(flags & SeaFlags::kUseSnapshot); + std::string_view code = + ReadStringView(use_snapshot ? StringLogMode::kAddressOnly + : StringLogMode::kAddressAndContent); + + Debug("Read SEA resource %s %p, size=%zu\n", + use_snapshot ? 
"snapshot" : "code", + code.data(), + code.size()); + + std::string_view code_cache; + if (static_cast(flags & SeaFlags::kUseCodeCache)) { + code_cache = ReadStringView(StringLogMode::kAddressOnly); + Debug("Read SEA resource code cache %p, size=%zu\n", + code_cache.data(), + code_cache.size()); + } + + std::unordered_map assets; + if (static_cast(flags & SeaFlags::kIncludeAssets)) { + size_t assets_size = ReadArithmetic(); + Debug("Read SEA resource assets size %zu\n", assets_size); + for (size_t i = 0; i < assets_size; ++i) { + std::string_view key = ReadStringView(StringLogMode::kAddressAndContent); + std::string_view content = ReadStringView(StringLogMode::kAddressOnly); + Debug("Read SEA resource asset %s at %p, size=%zu\n", + key, + content.data(), + content.size()); + assets.emplace(key, content); + } + } + + std::vector exec_argv; + if (static_cast(flags & SeaFlags::kIncludeExecArgv)) { + size_t exec_argv_size = ReadArithmetic(); + Debug("Read SEA resource exec args size %zu\n", exec_argv_size); + exec_argv.reserve(exec_argv_size); + for (size_t i = 0; i < exec_argv_size; ++i) { + std::string_view arg = ReadStringView(StringLogMode::kAddressAndContent); + Debug("Read SEA resource exec arg %s at %p, size=%zu\n", + arg.data(), + arg.data(), + arg.size()); + exec_argv.emplace_back(arg); + } + } + return {flags, + exec_argv_extension, + code_path, + code, + code_cache, + assets, + exec_argv}; +} + +std::string_view FindSingleExecutableBlob() { +#if !defined(DISABLE_SINGLE_EXECUTABLE_APPLICATION) + CHECK(IsSingleExecutable()); + static const std::string_view result = []() -> std::string_view { + size_t size; +#ifdef __APPLE__ + postject_options options; + postject_options_init(&options); + options.macho_segment_name = "NODE_SEA"; + const char* blob = static_cast( + postject_find_resource("NODE_SEA_BLOB", &size, &options)); +#else + const char* blob = static_cast( + postject_find_resource("NODE_SEA_BLOB", &size, nullptr)); +#endif + return {blob, size}; + }(); + 
per_process::Debug(DebugCategory::SEA, + "Found SEA blob %p, size=%zu\n", + result.data(), + result.size()); + return result; +#else + UNREACHABLE(); +#endif // !defined(DISABLE_SINGLE_EXECUTABLE_APPLICATION) +} + +} // anonymous namespace + +bool SeaResource::use_snapshot() const { + return static_cast(flags & SeaFlags::kUseSnapshot); +} + +bool SeaResource::use_code_cache() const { + return static_cast(flags & SeaFlags::kUseCodeCache); +} + +SeaResource FindSingleExecutableResource() { + static const SeaResource sea_resource = []() -> SeaResource { + std::string_view blob = FindSingleExecutableBlob(); + per_process::Debug(DebugCategory::SEA, + "Found SEA resource %p, size=%zu\n", + blob.data(), + blob.size()); + SeaDeserializer deserializer(blob); + return deserializer.Read(); + }(); + return sea_resource; +} + +bool IsSingleExecutable() { + return postject_has_resource(); +} + +void IsSea(const FunctionCallbackInfo& args) { + args.GetReturnValue().Set(IsSingleExecutable()); +} + +void IsExperimentalSeaWarningNeeded(const FunctionCallbackInfo& args) { + bool is_building_sea = + !per_process::cli_options->experimental_sea_config.empty(); + if (is_building_sea) { + args.GetReturnValue().Set(true); + return; + } + + if (!IsSingleExecutable()) { + args.GetReturnValue().Set(false); + return; + } + + SeaResource sea_resource = FindSingleExecutableResource(); + args.GetReturnValue().Set(!static_cast( + sea_resource.flags & SeaFlags::kDisableExperimentalSeaWarning)); +} + +std::tuple FixupArgsForSEA(int argc, char** argv) { + // Repeats argv[0] at position 1 on argv as a replacement for the missing + // entry point file path. 
+ if (IsSingleExecutable()) { + static std::vector new_argv; + static std::vector exec_argv_storage; + static std::vector cli_extension_args; + + SeaResource sea_resource = FindSingleExecutableResource(); + + new_argv.clear(); + exec_argv_storage.clear(); + cli_extension_args.clear(); + + // Handle CLI extension mode for --node-options + if (sea_resource.exec_argv_extension == SeaExecArgvExtension::kCli) { + // Extract --node-options and filter argv + for (int i = 1; i < argc; ++i) { + if (strncmp(argv[i], "--node-options=", 15) == 0) { + std::string node_options = argv[i] + 15; + std::vector errors; + cli_extension_args = ParseNodeOptionsEnvVar(node_options, &errors); + // Remove this argument by shifting the rest + for (int j = i; j < argc - 1; ++j) { + argv[j] = argv[j + 1]; + } + argc--; + i--; // Adjust index since we removed an element + } + } + } + + // Reserve space for argv[0], exec argv, cli extension args, original argv, + // and nullptr + new_argv.reserve(argc + sea_resource.exec_argv.size() + + cli_extension_args.size() + 2); + new_argv.emplace_back(argv[0]); + + // Insert exec argv from SEA config + if (!sea_resource.exec_argv.empty()) { + exec_argv_storage.reserve(sea_resource.exec_argv.size() + + cli_extension_args.size()); + for (const auto& arg : sea_resource.exec_argv) { + exec_argv_storage.emplace_back(arg); + new_argv.emplace_back(exec_argv_storage.back().data()); + } + } + + // Insert CLI extension args + for (const auto& arg : cli_extension_args) { + exec_argv_storage.emplace_back(arg); + new_argv.emplace_back(exec_argv_storage.back().data()); + } + + // Add actual run time arguments + new_argv.insert(new_argv.end(), argv, argv + argc); + new_argv.emplace_back(nullptr); + argc = new_argv.size() - 1; + argv = new_argv.data(); + } + + return {argc, argv}; +} + +namespace { + +struct SeaConfig { + std::string main_path; + std::string output_path; + SeaFlags flags = SeaFlags::kDefault; + SeaExecArgvExtension exec_argv_extension = 
SeaExecArgvExtension::kEnv; + std::unordered_map assets; + std::vector exec_argv; +}; + +std::optional ParseSingleExecutableConfig( + const std::string& config_path) { + std::string config; + int r = ReadFileSync(&config, config_path.c_str()); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, + "Cannot read single executable configuration from %s: %s\n", + config_path, + err); + return std::nullopt; + } + + SeaConfig result; + + simdjson::ondemand::parser parser; + simdjson::ondemand::document document; + simdjson::ondemand::object main_object; + simdjson::error_code error = + parser.iterate(simdjson::pad(config)).get(document); + + if (!error) { + error = document.get_object().get(main_object); + } + if (error) { + FPrintF(stderr, + "Cannot parse JSON from %s: %s\n", + config_path, + simdjson::error_message(error)); + return std::nullopt; + } + + bool use_snapshot_value = false; + bool use_code_cache_value = false; + + for (auto field : main_object) { + std::string_view key; + if (field.unescaped_key().get(key)) { + FPrintF(stderr, "Cannot read key from %s\n", config_path); + return std::nullopt; + } + if (key == "main") { + if (field.value().get_string().get(result.main_path) || + result.main_path.empty()) { + FPrintF(stderr, + "\"main\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + } else if (key == "output") { + if (field.value().get_string().get(result.output_path) || + result.output_path.empty()) { + FPrintF(stderr, + "\"output\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + } else if (key == "disableExperimentalSEAWarning") { + bool disable_experimental_sea_warning; + if (field.value().get_bool().get(disable_experimental_sea_warning)) { + FPrintF( + stderr, + "\"disableExperimentalSEAWarning\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (disable_experimental_sea_warning) { + result.flags |= 
SeaFlags::kDisableExperimentalSeaWarning; + } + } else if (key == "useSnapshot") { + if (field.value().get_bool().get(use_snapshot_value)) { + FPrintF(stderr, + "\"useSnapshot\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (use_snapshot_value) { + result.flags |= SeaFlags::kUseSnapshot; + } + } else if (key == "useCodeCache") { + if (field.value().get_bool().get(use_code_cache_value)) { + FPrintF(stderr, + "\"useCodeCache\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (use_code_cache_value) { + result.flags |= SeaFlags::kUseCodeCache; + } + } else if (key == "assets") { + simdjson::ondemand::object assets_object; + if (field.value().get_object().get(assets_object)) { + FPrintF(stderr, + "\"assets\" field of %s is not a map of strings\n", + config_path); + return std::nullopt; + } + simdjson::ondemand::value asset_value; + for (auto asset_field : assets_object) { + std::string_view key_str; + std::string_view value_str; + if (asset_field.unescaped_key().get(key_str) || + asset_field.value().get(asset_value) || + asset_value.get_string().get(value_str)) { + FPrintF(stderr, + "\"assets\" field of %s is not a map of strings\n", + config_path); + return std::nullopt; + } + + result.assets.emplace(key_str, value_str); + } + + if (!result.assets.empty()) { + result.flags |= SeaFlags::kIncludeAssets; + } + } else if (key == "execArgv") { + simdjson::ondemand::array exec_argv_array; + if (field.value().get_array().get(exec_argv_array)) { + FPrintF(stderr, + "\"execArgv\" field of %s is not an array of strings\n", + config_path); + return std::nullopt; + } + std::vector exec_argv; + for (auto argv : exec_argv_array) { + std::string_view argv_str; + if (argv.get_string().get(argv_str)) { + FPrintF(stderr, + "\"execArgv\" field of %s is not an array of strings\n", + config_path); + return std::nullopt; + } + exec_argv.emplace_back(argv_str); + } + if (!exec_argv.empty()) { + result.flags |= 
SeaFlags::kIncludeExecArgv; + result.exec_argv = std::move(exec_argv); + } + } else if (key == "execArgvExtension") { + std::string_view extension_str; + if (field.value().get_string().get(extension_str)) { + FPrintF(stderr, + "\"execArgvExtension\" field of %s is not a string\n", + config_path); + return std::nullopt; + } + if (extension_str == "none") { + result.exec_argv_extension = SeaExecArgvExtension::kNone; + } else if (extension_str == "env") { + result.exec_argv_extension = SeaExecArgvExtension::kEnv; + } else if (extension_str == "cli") { + result.exec_argv_extension = SeaExecArgvExtension::kCli; + } else { + FPrintF(stderr, + "\"execArgvExtension\" field of %s must be one of " + "\"none\", \"env\", or \"cli\"\n", + config_path); + return std::nullopt; + } + } + } + + if (static_cast(result.flags & SeaFlags::kUseSnapshot) && + static_cast(result.flags & SeaFlags::kUseCodeCache)) { + // TODO(joyeecheung): code cache in snapshot should be configured by + // separate snapshot configurations. + FPrintF(stderr, + "\"useCodeCache\" is redundant when \"useSnapshot\" is true\n"); + } + + if (result.main_path.empty()) { + FPrintF(stderr, + "\"main\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + + if (result.output_path.empty()) { + FPrintF(stderr, + "\"output\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + + return result; +} + +ExitCode GenerateSnapshotForSEA(const SeaConfig& config, + const std::vector& args, + const std::vector& exec_args, + const std::string& builder_script_content, + const SnapshotConfig& snapshot_config, + std::vector* snapshot_blob) { + SnapshotData snapshot; + // TODO(joyeecheung): make the arguments configurable through the JSON + // config or a programmatic API. 
+ std::vector patched_args = {args[0], config.main_path}; + ExitCode exit_code = SnapshotBuilder::Generate(&snapshot, + patched_args, + exec_args, + builder_script_content, + snapshot_config); + if (exit_code != ExitCode::kNoFailure) { + return exit_code; + } + auto& persistents = snapshot.env_info.principal_realm.persistent_values; + auto it = std::ranges::find_if(persistents, [](const PropInfo& prop) { + return prop.name == "snapshot_deserialize_main"; + }); + if (it == persistents.end()) { + FPrintF( + stderr, + "%s does not invoke " + "v8.startupSnapshot.setDeserializeMainFunction(), which is required " + "for snapshot scripts used to build single executable applications." + "\n", + config.main_path); + return ExitCode::kGenericUserError; + } + // We need the temporary variable for copy elision. + std::vector temp = snapshot.ToBlob(); + *snapshot_blob = std::move(temp); + return ExitCode::kNoFailure; +} + +std::optional GenerateCodeCache(std::string_view main_path, + std::string_view main_script) { + RAIIIsolate raii_isolate(SnapshotBuilder::GetEmbeddedSnapshotData()); + Isolate* isolate = raii_isolate.get(); + + v8::Isolate::Scope isolate_scope(isolate); + HandleScope handle_scope(isolate); + + Local context = Context::New(isolate); + Context::Scope context_scope(context); + + errors::PrinterTryCatch bootstrapCatch( + isolate, errors::PrinterTryCatch::kPrintSourceLine); + + Local filename; + if (!String::NewFromUtf8(isolate, + main_path.data(), + NewStringType::kNormal, + main_path.length()) + .ToLocal(&filename)) { + return std::nullopt; + } + + Local content; + if (!String::NewFromUtf8(isolate, + main_script.data(), + NewStringType::kNormal, + main_script.length()) + .ToLocal(&content)) { + return std::nullopt; + } + + LocalVector parameters( + isolate, + { + FIXED_ONE_BYTE_STRING(isolate, "exports"), + FIXED_ONE_BYTE_STRING(isolate, "require"), + FIXED_ONE_BYTE_STRING(isolate, "module"), + FIXED_ONE_BYTE_STRING(isolate, "__filename"), + 
FIXED_ONE_BYTE_STRING(isolate, "__dirname"), + }); + ScriptOrigin script_origin(filename, 0, 0, true); + ScriptCompiler::Source script_source(content, script_origin); + MaybeLocal maybe_fn = + ScriptCompiler::CompileFunction(context, + &script_source, + parameters.size(), + parameters.data(), + 0, + nullptr); + Local fn; + if (!maybe_fn.ToLocal(&fn)) { + return std::nullopt; + } + + // TODO(RaisinTen): Using the V8 code cache prevents us from using `import()` + // in the SEA code. Support it. + // Refs: https://github.com/nodejs/node/pull/48191#discussion_r1213271430 + std::unique_ptr cache{ + ScriptCompiler::CreateCodeCacheForFunction(fn)}; + std::string code_cache(cache->data, cache->data + cache->length); + return code_cache; +} + +int BuildAssets(const std::unordered_map& config, + std::unordered_map* assets) { + for (auto const& [key, path] : config) { + std::string blob; + int r = ReadFileSync(&blob, path.c_str()); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, "Cannot read asset %s: %s\n", path.c_str(), err); + return r; + } + assets->emplace(key, std::move(blob)); + } + return 0; +} + +ExitCode GenerateSingleExecutableBlob( + const SeaConfig& config, + const std::vector& args, + const std::vector& exec_args) { + std::string main_script; + // TODO(joyeecheung): unify the file utils. + int r = ReadFileSync(&main_script, config.main_path.c_str()); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, "Cannot read main script %s:%s\n", config.main_path, err); + return ExitCode::kGenericUserError; + } + + std::vector snapshot_blob; + bool builds_snapshot_from_main = + static_cast(config.flags & SeaFlags::kUseSnapshot); + if (builds_snapshot_from_main) { + // TODO(joyeecheung): allow passing snapshot configuration in SEA configs. 
+ SnapshotConfig snapshot_config; + snapshot_config.builder_script_path = main_script; + ExitCode exit_code = GenerateSnapshotForSEA( + config, args, exec_args, main_script, snapshot_config, &snapshot_blob); + if (exit_code != ExitCode::kNoFailure) { + return exit_code; + } + } + + std::optional optional_sv_code_cache; + std::string code_cache; + if (static_cast(config.flags & SeaFlags::kUseCodeCache)) { + std::optional optional_code_cache = + GenerateCodeCache(config.main_path, main_script); + if (!optional_code_cache.has_value()) { + FPrintF(stderr, "Cannot generate V8 code cache\n"); + return ExitCode::kGenericUserError; + } + code_cache = optional_code_cache.value(); + optional_sv_code_cache = code_cache; + } + + std::unordered_map assets; + if (!config.assets.empty() && BuildAssets(config.assets, &assets) != 0) { + return ExitCode::kGenericUserError; + } + std::unordered_map assets_view; + for (auto const& [key, content] : assets) { + assets_view.emplace(key, content); + } + std::vector exec_argv_view; + for (const auto& arg : config.exec_argv) { + exec_argv_view.emplace_back(arg); + } + SeaResource sea{ + config.flags, + config.exec_argv_extension, + config.main_path, + builds_snapshot_from_main + ? 
std::string_view{snapshot_blob.data(), snapshot_blob.size()} + : std::string_view{main_script.data(), main_script.size()}, + optional_sv_code_cache, + assets_view, + exec_argv_view}; + + SeaSerializer serializer; + serializer.Write(sea); + + uv_buf_t buf = uv_buf_init(serializer.sink.data(), serializer.sink.size()); + r = WriteFileSync(config.output_path.c_str(), buf); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, "Cannot write output to %s:%s\n", config.output_path, err); + return ExitCode::kGenericUserError; + } + + FPrintF(stderr, + "Wrote single executable preparation blob to %s\n", + config.output_path); + return ExitCode::kNoFailure; +} + +} // anonymous namespace + +ExitCode BuildSingleExecutableBlob(const std::string& config_path, + const std::vector& args, + const std::vector& exec_args) { + std::optional config_opt = + ParseSingleExecutableConfig(config_path); + if (config_opt.has_value()) { + ExitCode code = + GenerateSingleExecutableBlob(config_opt.value(), args, exec_args); + return code; + } + + return ExitCode::kGenericUserError; +} + +void GetAsset(const FunctionCallbackInfo& args) { + CHECK_EQ(args.Length(), 1); + CHECK(args[0]->IsString()); + Utf8Value key(args.GetIsolate(), args[0]); + SeaResource sea_resource = FindSingleExecutableResource(); + if (sea_resource.assets.empty()) { + return; + } + auto it = sea_resource.assets.find(*key); + if (it == sea_resource.assets.end()) { + return; + } + // We cast away the constness here, the JS land should ensure that + // the data is not mutated. 
+ std::unique_ptr store = ArrayBuffer::NewBackingStore( + const_cast(it->second.data()), + it->second.size(), + [](void*, size_t, void*) {}, + nullptr); + Local ab = ArrayBuffer::New(args.GetIsolate(), std::move(store)); + args.GetReturnValue().Set(ab); +} + +void GetAssetKeys(const FunctionCallbackInfo& args) { + CHECK_EQ(args.Length(), 0); + Isolate* isolate = args.GetIsolate(); + SeaResource sea_resource = FindSingleExecutableResource(); + + Local context = isolate->GetCurrentContext(); + LocalVector keys(isolate); + keys.reserve(sea_resource.assets.size()); + for (const auto& [key, _] : sea_resource.assets) { + Local key_str; + if (!ToV8Value(context, key).ToLocal(&key_str)) { + return; + } + keys.push_back(key_str); + } + Local result = Array::New(isolate, keys.data(), keys.size()); + args.GetReturnValue().Set(result); +} + +MaybeLocal LoadSingleExecutableApplication( + const StartExecutionCallbackInfo& info) { + // Here we are currently relying on the fact that in NodeMainInstance::Run(), + // env->context() is entered. + Local context = Isolate::GetCurrent()->GetCurrentContext(); + Environment* env = Environment::GetCurrent(context); + SeaResource sea = FindSingleExecutableResource(); + + CHECK(!sea.use_snapshot()); + // TODO(joyeecheung): this should be an external string. Refactor UnionBytes + // and make it easy to create one based on static content on the fly. 
+ Local main_script = + ToV8Value(env->context(), sea.main_code_or_snapshot).ToLocalChecked(); + return info.run_cjs->Call( + env->context(), Null(env->isolate()), 1, &main_script); +} + +bool MaybeLoadSingleExecutableApplication(Environment* env) { +#ifndef DISABLE_SINGLE_EXECUTABLE_APPLICATION + if (!IsSingleExecutable()) { + return false; + } + + SeaResource sea = FindSingleExecutableResource(); + + if (sea.use_snapshot()) { + // The SEA preparation blob building process should already enforce this, + // this check is just here to guard against the unlikely case where + // the SEA preparation blob has been manually modified by someone. + CHECK(!env->snapshot_deserialize_main().IsEmpty()); + LoadEnvironment(env, StartExecutionCallback{}); + return true; + } + + LoadEnvironment(env, LoadSingleExecutableApplication); + return true; +#else + return false; +#endif +} + +void Initialize(Local target, + Local unused, + Local context, + void* priv) { + SetMethod(context, target, "isSea", IsSea); + SetMethod(context, + target, + "isExperimentalSeaWarningNeeded", + IsExperimentalSeaWarningNeeded); + SetMethod(context, target, "getAsset", GetAsset); + SetMethod(context, target, "getAssetKeys", GetAssetKeys); +} + +void RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(IsSea); + registry->Register(IsExperimentalSeaWarningNeeded); + registry->Register(GetAsset); + registry->Register(GetAssetKeys); +} + +} // namespace sea +} // namespace node + +NODE_BINDING_CONTEXT_AWARE_INTERNAL(sea, node::sea::Initialize) +NODE_BINDING_EXTERNAL_REFERENCE(sea, node::sea::RegisterExternalReferences) \ No newline at end of file diff --git a/node/os.js b/node/os.js new file mode 100644 index 00000000..cd406d49 --- /dev/null +++ b/node/os.js @@ -0,0 +1,326 @@ +"use strict"; + +const { + ArrayPrototypePush, + Float64Array, + ObjectDefineProperties, + ObjectFreeze, + StringPrototypeSlice, + SymbolToPrimitive, +} = primordials; + +const { getTempDir } = 
internalBinding("credentials"); +const constants = internalBinding("constants").os; +const isWindows = process.platform === "win32"; + +const { + codes: { ERR_SYSTEM_ERROR }, + hideStackFrames, +} = require("internal/errors"); +const { getCIDR } = require("internal/util"); +const { validateInt32 } = require("internal/validators"); + +const { + getAvailableParallelism, + getCPUs, + getFreeMem, + getHomeDirectory: _getHomeDirectory, + getHostname: _getHostname, + getInterfaceAddresses: _getInterfaceAddresses, + getLoadAvg, + getPriority: _getPriority, + getOSInformation: _getOSInformation, + getTotalMem, + getUserInfo, + getUptime: _getUptime, + isBigEndian, + setPriority: _setPriority, +} = internalBinding("os"); + +function getCheckedFunction(fn) { + return hideStackFrames(function checkError() { + const ctx = {}; + const ret = fn(ctx); + if (ret === undefined) { + throw new ERR_SYSTEM_ERROR.HideStackFramesError(ctx); + } + return ret; + }); +} + +const { 0: type, 1: version, 2: release, 3: machine } = _getOSInformation(); + +const getHomeDirectory = getCheckedFunction(_getHomeDirectory); +const getHostname = getCheckedFunction(_getHostname); +const getInterfaceAddresses = getCheckedFunction(_getInterfaceAddresses); +const getUptime = getCheckedFunction(_getUptime); + +/** + * @returns {string} + */ +const getOSRelease = () => release; +/** + * @returns {string} + */ +const getOSType = () => type; +/** + * @returns {string} + */ +const getOSVersion = () => version; +/** + * @returns {string} + */ +const getMachine = () => machine; + +getAvailableParallelism[SymbolToPrimitive] = () => getAvailableParallelism(); +getFreeMem[SymbolToPrimitive] = () => getFreeMem(); +getHostname[SymbolToPrimitive] = () => getHostname(); +getOSVersion[SymbolToPrimitive] = () => getOSVersion(); +getOSType[SymbolToPrimitive] = () => getOSType(); +getOSRelease[SymbolToPrimitive] = () => getOSRelease(); +getMachine[SymbolToPrimitive] = () => getMachine(); 
+getHomeDirectory[SymbolToPrimitive] = () => getHomeDirectory(); +getTotalMem[SymbolToPrimitive] = () => getTotalMem(); +getUptime[SymbolToPrimitive] = () => getUptime(); + +const kEndianness = isBigEndian ? "BE" : "LE"; + +const avgValues = new Float64Array(3); + +/** + * @returns {[number, number, number]} + */ +function loadavg() { + getLoadAvg(avgValues); + return [avgValues[0], avgValues[1], avgValues[2]]; +} + +/** + * Returns an array of objects containing information about each + * logical CPU core. + * @returns {Array<{ + * model: string, + * speed: number, + * times: { + * user: number, + * nice: number, + * sys: number, + * idle: number, + * irq: number, + * }, + * }>} + */ +function cpus() { + // [] is a bugfix for a regression introduced in 51cea61 + const data = getCPUs() || []; + const result = []; + let i = 0; + while (i < data.length) { + ArrayPrototypePush(result, { + model: data[i++], + speed: data[i++], + times: { + user: data[i++], + nice: data[i++], + sys: data[i++], + idle: data[i++], + irq: data[i++], + }, + }); + } + return result; +} + +/** + * @returns {string} + */ +function arch() { + return process.arch; +} +arch[SymbolToPrimitive] = () => process.arch; + +/** + * @returns {string} + */ +function platform() { + return process.platform; +} +platform[SymbolToPrimitive] = () => process.platform; + +/** + * @returns {string} + */ +function tmpdir() { + if (isWindows) { + const path = + process.env.TEMP || + process.env.TMP || + (process.env.SystemRoot || process.env.windir) + "\\temp"; + + if ( + path.length > 1 && + path[path.length - 1] === "\\" && + path[path.length - 2] !== ":" + ) { + return StringPrototypeSlice(path, 0, -1); + } + + return path; + } + + return getTempDir() || "/tmp"; +} +tmpdir[SymbolToPrimitive] = () => tmpdir(); + +/** + * @returns {'BE' | 'LE'} + */ +function endianness() { + return kEndianness; +} +endianness[SymbolToPrimitive] = () => kEndianness; + +/** + * @returns {Record>} + */ +function networkInterfaces() 
{ + const data = getInterfaceAddresses(); + const result = {}; + + if (data === undefined) return result; + for (let i = 0; i < data.length; i += 7) { + const name = data[i]; + const entry = { + address: data[i + 1], + netmask: data[i + 2], + family: data[i + 3], + mac: data[i + 4], + internal: data[i + 5], + cidr: getCIDR(data[i + 1], data[i + 2], data[i + 3]), + }; + const scopeid = data[i + 6]; + if (scopeid !== -1) entry.scopeid = scopeid; + + const existing = result[name]; + if (existing !== undefined) ArrayPrototypePush(existing, entry); + else result[name] = [entry]; + } + + return result; +} + +/** + * @param {number} [pid] + * @param {number} [priority] + * @returns {void} + */ +function setPriority(pid, priority) { + if (priority === undefined) { + priority = pid; + pid = 0; + } + + validateInt32(pid, "pid"); + validateInt32(priority, "priority", -20, 19); + + const ctx = {}; + + if (_setPriority(pid, priority, ctx) !== 0) throw new ERR_SYSTEM_ERROR(ctx); +} + +/** + * @param {number} [pid] + * @returns {number} + */ +function getPriority(pid) { + if (pid === undefined) pid = 0; + else validateInt32(pid, "pid"); + + const ctx = {}; + const priority = _getPriority(pid, ctx); + + if (priority === undefined) throw new ERR_SYSTEM_ERROR(ctx); + + return priority; +} + +/** + * @param {{ encoding?: string }} [options] If `encoding` is set to + * `'buffer'`, the `username`, `shell`, and `homedir` values will + * be `Buffer` instances. 
+ * @returns {{ + * uid: number, + * gid: number, + * username: string, + * homedir: string, + * shell: string | null, + * }} + */ +function userInfo(options) { + if (typeof options !== "object") options = null; + + const ctx = {}; + const user = getUserInfo(options, ctx); + + if (user === undefined) throw new ERR_SYSTEM_ERROR(ctx); + + return user; +} + +module.exports = { + arch, + availableParallelism: getAvailableParallelism, + cpus, + endianness, + freemem: getFreeMem, + getPriority, + homedir: getHomeDirectory, + hostname: getHostname, + loadavg, + networkInterfaces, + platform, + release: getOSRelease, + setPriority, + tmpdir, + totalmem: getTotalMem, + type: getOSType, + userInfo, + uptime: getUptime, + version: getOSVersion, + machine: getMachine, +}; + +ObjectFreeze(constants.signals); + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + + EOL: { + __proto__: null, + configurable: true, + enumerable: true, + writable: false, + value: isWindows ? "\r\n" : "\n", + }, + + devNull: { + __proto__: null, + configurable: true, + enumerable: true, + writable: false, + value: isWindows ? 
"\\\\.\\nul" : "/dev/null", + }, +}); diff --git a/node/path.js b/node/path.js new file mode 100644 index 00000000..08ee62f3 --- /dev/null +++ b/node/path.js @@ -0,0 +1,1790 @@ +"use strict"; + +const { + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypePush, + ArrayPrototypeSlice, + FunctionPrototypeBind, + StringPrototypeCharCodeAt, + StringPrototypeIncludes, + StringPrototypeIndexOf, + StringPrototypeLastIndexOf, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, +} = primordials; + +const { + CHAR_UPPERCASE_A, + CHAR_LOWERCASE_A, + CHAR_UPPERCASE_Z, + CHAR_LOWERCASE_Z, + CHAR_DOT, + CHAR_FORWARD_SLASH, + CHAR_BACKWARD_SLASH, + CHAR_COLON, + CHAR_QUESTION_MARK, +} = require("internal/constants"); +const { validateObject, validateString } = require("internal/validators"); + +const { isWindows, getLazy } = require("internal/util"); + +const lazyMatchGlobPattern = getLazy( + () => require("internal/fs/glob").matchGlobPattern +); + +function isPathSeparator(code) { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +} + +function isPosixPathSeparator(code) { + return code === CHAR_FORWARD_SLASH; +} + +const WINDOWS_RESERVED_NAMES = [ + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", + "COM\xb9", + "COM\xb2", + "COM\xb3", + "LPT\xb9", + "LPT\xb2", + "LPT\xb3", +]; + +function isWindowsReservedName(path, colonIndex) { + const devicePart = StringPrototypeToUpperCase( + StringPrototypeSlice(path, 0, colonIndex) + ); + return ArrayPrototypeIncludes(WINDOWS_RESERVED_NAMES, devicePart); +} + +function isWindowsDeviceRoot(code) { + return ( + (code >= CHAR_UPPERCASE_A && code <= CHAR_UPPERCASE_Z) || + (code >= CHAR_LOWERCASE_A && code <= CHAR_LOWERCASE_Z) + ); +} + +// 
Resolves . and .. elements in a path with directory names +function normalizeString(path, allowAboveRoot, separator, isPathSeparator) { + let res = ""; + let lastSegmentLength = 0; + let lastSlash = -1; + let dots = 0; + let code = 0; + for (let i = 0; i <= path.length; ++i) { + if (i < path.length) code = StringPrototypeCharCodeAt(path, i); + else if (isPathSeparator(code)) break; + else code = CHAR_FORWARD_SLASH; + + if (isPathSeparator(code)) { + if (lastSlash === i - 1 || dots === 1) { + // NOOP + } else if (dots === 2) { + if ( + res.length < 2 || + lastSegmentLength !== 2 || + StringPrototypeCharCodeAt(res, res.length - 1) !== CHAR_DOT || + StringPrototypeCharCodeAt(res, res.length - 2) !== CHAR_DOT + ) { + if (res.length > 2) { + const lastSlashIndex = res.length - lastSegmentLength - 1; + if (lastSlashIndex === -1) { + res = ""; + lastSegmentLength = 0; + } else { + res = StringPrototypeSlice(res, 0, lastSlashIndex); + lastSegmentLength = + res.length - 1 - StringPrototypeLastIndexOf(res, separator); + } + lastSlash = i; + dots = 0; + continue; + } else if (res.length !== 0) { + res = ""; + lastSegmentLength = 0; + lastSlash = i; + dots = 0; + continue; + } + } + if (allowAboveRoot) { + res += res.length > 0 ? `${separator}..` : ".."; + lastSegmentLength = 2; + } + } else { + if (res.length > 0) + res += `${separator}${StringPrototypeSlice(path, lastSlash + 1, i)}`; + else res = StringPrototypeSlice(path, lastSlash + 1, i); + lastSegmentLength = i - lastSlash - 1; + } + lastSlash = i; + dots = 0; + } else if (code === CHAR_DOT && dots !== -1) { + ++dots; + } else { + dots = -1; + } + } + return res; +} + +function formatExt(ext) { + return ext ? `${ext[0] === "." ? 
"" : "."}${ext}` : ""; +} + +/** + * @param {string} sep + * @param {{ + * dir?: string; + * root?: string; + * base?: string; + * name?: string; + * ext?: string; + * }} pathObject + * @returns {string} + */ +function _format(sep, pathObject) { + validateObject(pathObject, "pathObject"); + const dir = pathObject.dir || pathObject.root; + const base = + pathObject.base || `${pathObject.name || ""}${formatExt(pathObject.ext)}`; + if (!dir) { + return base; + } + return dir === pathObject.root ? `${dir}${base}` : `${dir}${sep}${base}`; +} + +const forwardSlashRegExp = /\//g; + +const win32 = { + /** + * path.resolve([from ...], to) + * @param {...string} args + * @returns {string} + */ + resolve(...args) { + let resolvedDevice = ""; + let resolvedTail = ""; + let resolvedAbsolute = false; + + for (let i = args.length - 1; i >= -1; i--) { + let path; + if (i >= 0) { + path = args[i]; + validateString(path, `paths[${i}]`); + + // Skip empty entries + if (path.length === 0) { + continue; + } + } else if (resolvedDevice.length === 0) { + path = process.cwd(); + // Fast path for current directory + if ( + args.length === 0 || + (args.length === 1 && + (args[0] === "" || args[0] === ".") && + isPathSeparator(StringPrototypeCharCodeAt(path, 0))) + ) { + if (!isWindows) { + path = StringPrototypeReplace(path, forwardSlashRegExp, "\\"); + } + return path; + } + } else { + // Windows has the concept of drive-specific current working + // directories. If we've resolved a drive letter but not yet an + // absolute path, get cwd for that drive, or the process cwd if + // the drive cwd is not available. We're sure the device is not + // a UNC path at this points, because UNC paths are always absolute. + path = process.env[`=${resolvedDevice}`] || process.cwd(); + + // Verify that a cwd was found and that it actually points + // to our drive. If not, default to the drive's root. 
+ if ( + path === undefined || + (StringPrototypeToLowerCase(StringPrototypeSlice(path, 0, 2)) !== + StringPrototypeToLowerCase(resolvedDevice) && + StringPrototypeCharCodeAt(path, 2) === CHAR_BACKWARD_SLASH) + ) { + path = `${resolvedDevice}\\`; + } + } + + const len = path.length; + let rootEnd = 0; + let device = ""; + let isAbsolute = false; + const code = StringPrototypeCharCodeAt(path, 0); + + // Try to match a root + if (len === 1) { + if (isPathSeparator(code)) { + // `path` contains just a path separator + rootEnd = 1; + isAbsolute = true; + } + } else if (isPathSeparator(code)) { + // Possible UNC root + + // If we started with a separator, we know we at least have an + // absolute path of some kind (UNC or otherwise) + isAbsolute = true; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + const firstPart = StringPrototypeSlice(path, last, j); + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len || j !== last) { + if (firstPart !== "." && firstPart !== "?") { + // We matched a UNC root + device = `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last, + j + )}`; + rootEnd = j; + } else { + // We matched a device root (e.g. 
\\\\.\\PHYSICALDRIVE0) + device = `\\\\${firstPart}`; + rootEnd = 4; + } + } + } + } + } else { + rootEnd = 1; + } + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + // Possible device root + device = StringPrototypeSlice(path, 0, 2); + rootEnd = 2; + if (len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + // Treat separator following drive name as an absolute path + // indicator + isAbsolute = true; + rootEnd = 3; + } + } + + if (device.length > 0) { + if (resolvedDevice.length > 0) { + if ( + StringPrototypeToLowerCase(device) !== + StringPrototypeToLowerCase(resolvedDevice) + ) + // This path points to another device so it is not applicable + continue; + } else { + resolvedDevice = device; + } + } + + if (resolvedAbsolute) { + if (resolvedDevice.length > 0) break; + } else { + resolvedTail = `${StringPrototypeSlice( + path, + rootEnd + )}\\${resolvedTail}`; + resolvedAbsolute = isAbsolute; + if (isAbsolute && resolvedDevice.length > 0) { + break; + } + } + } + + // At this point the path should be resolved to a full absolute path, + // but handle relative paths to be safe (might happen when process.cwd() + // fails) + + // Normalize the tail path + resolvedTail = normalizeString( + resolvedTail, + !resolvedAbsolute, + "\\", + isPathSeparator + ); + + return resolvedAbsolute + ? `${resolvedDevice}\\${resolvedTail}` + : `${resolvedDevice}${resolvedTail}` || "."; + }, + + /** + * @param {string} path + * @returns {string} + */ + normalize(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return "."; + let rootEnd = 0; + let device; + let isAbsolute = false; + const code = StringPrototypeCharCodeAt(path, 0); + + // Try to match a root + if (len === 1) { + // `path` contains just a single char, exit early to avoid + // unnecessary work + return isPosixPathSeparator(code) ? 
"\\" : path; + } + if (isPathSeparator(code)) { + // Possible UNC root + + // If we started with a separator, we know we at least have an absolute + // path of some kind (UNC or otherwise) + isAbsolute = true; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + const firstPart = StringPrototypeSlice(path, last, j); + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len || j !== last) { + if (firstPart === "." || firstPart === "?") { + // We matched a device root (e.g. 
\\\\.\\PHYSICALDRIVE0) + device = `\\\\${firstPart}`; + rootEnd = 4; + const colonIndex = StringPrototypeIndexOf(path, ":"); + // Special case: handle \\?\COM1: or similar reserved device paths + const possibleDevice = StringPrototypeSlice( + path, + 4, + colonIndex + 1 + ); + if ( + isWindowsReservedName( + possibleDevice, + possibleDevice.length - 1 + ) + ) { + device = `\\\\?\\${possibleDevice}`; + rootEnd = 4 + possibleDevice.length; + } + } else if (j === len) { + // We matched a UNC root only + // Return the normalized version of the UNC root since there + // is nothing left to process + return `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last + )}\\`; + } else { + // We matched a UNC root with leftovers + device = `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last, + j + )}`; + rootEnd = j; + } + } + } + } + } else { + rootEnd = 1; + } + } else { + const colonIndex = StringPrototypeIndexOf(path, ":"); + if (colonIndex > 0) { + if (isWindowsDeviceRoot(code) && colonIndex === 1) { + device = StringPrototypeSlice(path, 0, 2); + rootEnd = 2; + if (len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + isAbsolute = true; + rootEnd = 3; + } + } else if (isWindowsReservedName(path, colonIndex)) { + device = StringPrototypeSlice(path, 0, colonIndex + 1); + rootEnd = colonIndex + 1; + } + } + } + + let tail = + rootEnd < len + ? normalizeString( + StringPrototypeSlice(path, rootEnd), + !isAbsolute, + "\\", + isPathSeparator + ) + : ""; + if (tail.length === 0 && !isAbsolute) tail = "."; + if ( + tail.length > 0 && + isPathSeparator(StringPrototypeCharCodeAt(path, len - 1)) + ) + tail += "\\"; + if ( + !isAbsolute && + device === undefined && + StringPrototypeIncludes(path, ":") + ) { + // If the original path was not absolute and if we have not been able to + // resolve it relative to a particular device, we need to ensure that the + // `tail` has not become something that Windows might interpret as an + // absolute path. 
See CVE-2024-36139. + if ( + tail.length >= 2 && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(tail, 0)) && + StringPrototypeCharCodeAt(tail, 1) === CHAR_COLON + ) { + return `.\\${tail}`; + } + let index = StringPrototypeIndexOf(path, ":"); + + do { + if ( + index === len - 1 || + isPathSeparator(StringPrototypeCharCodeAt(path, index + 1)) + ) { + return `.\\${tail}`; + } + } while ((index = StringPrototypeIndexOf(path, ":", index + 1)) !== -1); + } + const colonIndex = StringPrototypeIndexOf(path, ":"); + if (isWindowsReservedName(path, colonIndex)) { + return `.\\${device ?? ""}${tail}`; + } + if (device === undefined) { + return isAbsolute ? `\\${tail}` : tail; + } + return isAbsolute ? `${device}\\${tail}` : `${device}${tail}`; + }, + + /** + * @param {string} path + * @returns {boolean} + */ + isAbsolute(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return false; + + const code = StringPrototypeCharCodeAt(path, 0); + return ( + isPathSeparator(code) || + // Possible device root + (len > 2 && + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON && + isPathSeparator(StringPrototypeCharCodeAt(path, 2))) + ); + }, + + /** + * @param {...string} args + * @returns {string} + */ + join(...args) { + if (args.length === 0) return "."; + + const path = []; + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + validateString(arg, "path"); + if (arg.length > 0) { + ArrayPrototypePush(path, arg); + } + } + + if (path.length === 0) return "."; + + const firstPart = path[0]; + let joined = ArrayPrototypeJoin(path, "\\"); + + // Make sure that the joined path doesn't start with two slashes, because + // normalize() will mistake it for a UNC path then. + // + // This step is skipped when it is very clear that the user actually + // intended to point at a UNC path. 
This is assumed when the first + // non-empty string arguments starts with exactly two slashes followed by + // at least one more non-slash character. + // + // Note that for normalize() to treat a path as a UNC path it needs to + // have at least 2 components, so we don't filter for that here. + // This means that the user can use join to construct UNC paths from + // a server name and a share name; for example: + // path.join('//server', 'share') -> '\\\\server\\share\\') + let needsReplace = true; + let slashCount = 0; + if (isPathSeparator(StringPrototypeCharCodeAt(firstPart, 0))) { + ++slashCount; + const firstLen = firstPart.length; + if ( + firstLen > 1 && + isPathSeparator(StringPrototypeCharCodeAt(firstPart, 1)) + ) { + ++slashCount; + if (firstLen > 2) { + if (isPathSeparator(StringPrototypeCharCodeAt(firstPart, 2))) + ++slashCount; + else { + // We matched a UNC path in the first part + needsReplace = false; + } + } + } + } + if (needsReplace) { + // Find any more consecutive slashes we need to replace + while ( + slashCount < joined.length && + isPathSeparator(StringPrototypeCharCodeAt(joined, slashCount)) + ) { + slashCount++; + } + + // Replace the slashes if needed + if (slashCount >= 2) + joined = `\\${StringPrototypeSlice(joined, slashCount)}`; + } + + // Skip normalization when reserved device names are present + const parts = []; + let part = ""; + + for (let i = 0; i < joined.length; i++) { + if (joined[i] === "\\") { + if (part) parts.push(part); + part = ""; + // Skip consecutive backslashes + while (i + 1 < joined.length && joined[i + 1] === "\\") i++; + } else { + part += joined[i]; + } + } + // Add the final part if any + if (part) parts.push(part); + + // Check if any part has a Windows reserved name + if ( + parts.some((p) => { + const colonIndex = StringPrototypeIndexOf(p, ":"); + return colonIndex !== -1 && isWindowsReservedName(p, colonIndex); + }) + ) { + // Replace forward slashes with backslashes + let result = ""; + for (let i = 0; 
i < joined.length; i++) { + result += joined[i] === "/" ? "\\" : joined[i]; + } + return result; + } + + return win32.normalize(joined); + }, + + /** + * It will solve the relative path from `from` to `to`, for instance + * from = 'C:\\orandea\\test\\aaa' + * to = 'C:\\orandea\\impl\\bbb' + * The output of the function should be: '..\\..\\impl\\bbb' + * @param {string} from + * @param {string} to + * @returns {string} + */ + relative(from, to) { + validateString(from, "from"); + validateString(to, "to"); + + if (from === to) return ""; + + const fromOrig = win32.resolve(from); + const toOrig = win32.resolve(to); + + if (fromOrig === toOrig) return ""; + + from = StringPrototypeToLowerCase(fromOrig); + to = StringPrototypeToLowerCase(toOrig); + + if (from === to) return ""; + + if (fromOrig.length !== from.length || toOrig.length !== to.length) { + const fromSplit = StringPrototypeSplit(fromOrig, "\\"); + const toSplit = StringPrototypeSplit(toOrig, "\\"); + if (fromSplit[fromSplit.length - 1] === "") { + fromSplit.pop(); + } + if (toSplit[toSplit.length - 1] === "") { + toSplit.pop(); + } + + const fromLen = fromSplit.length; + const toLen = toSplit.length; + const length = fromLen < toLen ? 
fromLen : toLen; + + let i; + for (i = 0; i < length; i++) { + if ( + StringPrototypeToLowerCase(fromSplit[i]) !== + StringPrototypeToLowerCase(toSplit[i]) + ) { + break; + } + } + + if (i === 0) { + return toOrig; + } else if (i === length) { + if (toLen > length) { + return ArrayPrototypeJoin(ArrayPrototypeSlice(toSplit, i), "\\"); + } + if (fromLen > length) { + return StringPrototypeRepeat("..\\", fromLen - 1 - i) + ".."; + } + return ""; + } + + return ( + StringPrototypeRepeat("..\\", fromLen - i) + + ArrayPrototypeJoin(ArrayPrototypeSlice(toSplit, i), "\\") + ); + } + + // Trim any leading backslashes + let fromStart = 0; + while ( + fromStart < from.length && + StringPrototypeCharCodeAt(from, fromStart) === CHAR_BACKWARD_SLASH + ) { + fromStart++; + } + // Trim trailing backslashes (applicable to UNC paths only) + let fromEnd = from.length; + while ( + fromEnd - 1 > fromStart && + StringPrototypeCharCodeAt(from, fromEnd - 1) === CHAR_BACKWARD_SLASH + ) { + fromEnd--; + } + const fromLen = fromEnd - fromStart; + + // Trim any leading backslashes + let toStart = 0; + while ( + toStart < to.length && + StringPrototypeCharCodeAt(to, toStart) === CHAR_BACKWARD_SLASH + ) { + toStart++; + } + // Trim trailing backslashes (applicable to UNC paths only) + let toEnd = to.length; + while ( + toEnd - 1 > toStart && + StringPrototypeCharCodeAt(to, toEnd - 1) === CHAR_BACKWARD_SLASH + ) { + toEnd--; + } + const toLen = toEnd - toStart; + + // Compare paths to find the longest common path from root + const length = fromLen < toLen ? fromLen : toLen; + let lastCommonSep = -1; + let i = 0; + for (; i < length; i++) { + const fromCode = StringPrototypeCharCodeAt(from, fromStart + i); + if (fromCode !== StringPrototypeCharCodeAt(to, toStart + i)) break; + else if (fromCode === CHAR_BACKWARD_SLASH) lastCommonSep = i; + } + + // We found a mismatch before the first common path separator was seen, so + // return the original `to`. 
+ if (i !== length) { + if (lastCommonSep === -1) return toOrig; + } else { + if (toLen > length) { + if ( + StringPrototypeCharCodeAt(to, toStart + i) === CHAR_BACKWARD_SLASH + ) { + // We get here if `from` is the exact base path for `to`. + // For example: from='C:\\foo\\bar'; to='C:\\foo\\bar\\baz' + return StringPrototypeSlice(toOrig, toStart + i + 1); + } + if (i === 2) { + // We get here if `from` is the device root. + // For example: from='C:\\'; to='C:\\foo' + return StringPrototypeSlice(toOrig, toStart + i); + } + } + if (fromLen > length) { + if ( + StringPrototypeCharCodeAt(from, fromStart + i) === CHAR_BACKWARD_SLASH + ) { + // We get here if `to` is the exact base path for `from`. + // For example: from='C:\\foo\\bar'; to='C:\\foo' + lastCommonSep = i; + } else if (i === 2) { + // We get here if `to` is the device root. + // For example: from='C:\\foo\\bar'; to='C:\\' + lastCommonSep = 3; + } + } + if (lastCommonSep === -1) lastCommonSep = 0; + } + + let out = ""; + // Generate the relative path based on the path difference between `to` and + // `from` + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if ( + i === fromEnd || + StringPrototypeCharCodeAt(from, i) === CHAR_BACKWARD_SLASH + ) { + out += out.length === 0 ? ".." : "\\.."; + } + } + + toStart += lastCommonSep; + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts + if (out.length > 0) + return `${out}${StringPrototypeSlice(toOrig, toStart, toEnd)}`; + + if (StringPrototypeCharCodeAt(toOrig, toStart) === CHAR_BACKWARD_SLASH) + ++toStart; + return StringPrototypeSlice(toOrig, toStart, toEnd); + }, + + /** + * @param {string} path + * @returns {string} + */ + toNamespacedPath(path) { + // Note: this will *probably* throw somewhere. 
+ if (typeof path !== "string" || path.length === 0) return path; + + const resolvedPath = win32.resolve(path); + + if (resolvedPath.length <= 2) return path; + + if (StringPrototypeCharCodeAt(resolvedPath, 0) === CHAR_BACKWARD_SLASH) { + // Possible UNC root + if (StringPrototypeCharCodeAt(resolvedPath, 1) === CHAR_BACKWARD_SLASH) { + const code = StringPrototypeCharCodeAt(resolvedPath, 2); + if (code !== CHAR_QUESTION_MARK && code !== CHAR_DOT) { + // Matched non-long UNC root, convert the path to a long UNC path + return `\\\\?\\UNC\\${StringPrototypeSlice(resolvedPath, 2)}`; + } + } + } else if ( + isWindowsDeviceRoot(StringPrototypeCharCodeAt(resolvedPath, 0)) && + StringPrototypeCharCodeAt(resolvedPath, 1) === CHAR_COLON && + StringPrototypeCharCodeAt(resolvedPath, 2) === CHAR_BACKWARD_SLASH + ) { + // Matched device root, convert the path to a long UNC path + return `\\\\?\\${resolvedPath}`; + } + + return resolvedPath; + }, + + /** + * @param {string} path + * @returns {string} + */ + dirname(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return "."; + let rootEnd = -1; + let offset = 0; + const code = StringPrototypeCharCodeAt(path, 0); + + if (len === 1) { + // `path` contains just a path separator, exit early to avoid + // unnecessary work or a dot. + return isPathSeparator(code) ? path : "."; + } + + // Try to match a root + if (isPathSeparator(code)) { + // Possible UNC root + + rootEnd = offset = 1; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! 
+ last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len) { + // We matched a UNC root only + return path; + } + if (j !== last) { + // We matched a UNC root with leftovers + + // Offset by 1 to include the separator after the UNC root to + // treat it as a "normal root" on top of a (UNC) root + rootEnd = offset = j + 1; + } + } + } + } + // Possible device root + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + rootEnd = + len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2)) ? 3 : 2; + offset = rootEnd; + } + + let end = -1; + let matchedSlash = true; + for (let i = len - 1; i >= offset; --i) { + if (isPathSeparator(StringPrototypeCharCodeAt(path, i))) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) { + if (rootEnd === -1) return "."; + + end = rootEnd; + } + return StringPrototypeSlice(path, 0, end); + }, + + /** + * @param {string} path + * @param {string} [suffix] + * @returns {string} + */ + basename(path, suffix) { + if (suffix !== undefined) validateString(suffix, "suffix"); + validateString(path, "path"); + let start = 0; + let end = -1; + let matchedSlash = true; + + // Check for a drive letter prefix so as not to mistake the following + // path separator as an extra separator at the end of the path that can be + // disregarded + if ( + path.length >= 2 && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(path, 0)) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + start = 2; + } + + if ( + suffix !== undefined && + suffix.length > 0 && + suffix.length <= path.length + ) { + if (suffix === path) return ""; + let extIdx = suffix.length - 1; + let firstNonSlashEnd = -1; + for (let i = path.length - 1; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if 
(isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === StringPrototypeCharCodeAt(suffix, extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd; + else if (end === -1) end = path.length; + return StringPrototypeSlice(path, start, end); + } + for (let i = path.length - 1; i >= start; --i) { + if (isPathSeparator(StringPrototypeCharCodeAt(path, i))) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ""; + return StringPrototypeSlice(path, start, end); + }, + + /** + * @param {string} path + * @returns {string} + */ + extname(path) { + validateString(path, "path"); + let start = 0; + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Check for a drive letter prefix so as not to mistake the following + // path separator as an extra separator at the end of the path that can be + // 
disregarded + + if ( + path.length >= 2 && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(path, 0)) + ) { + start = startPart = 2; + } + + for (let i = path.length - 1; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if ( + startDot === -1 || + end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' 
+ (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return ""; + } + return StringPrototypeSlice(path, startDot, end); + }, + + format: FunctionPrototypeBind(_format, null, "\\"), + + /** + * @param {string} path + * @returns {{ + * dir: string; + * root: string; + * base: string; + * name: string; + * ext: string; + * }} + */ + parse(path) { + validateString(path, "path"); + + const ret = { root: "", dir: "", base: "", ext: "", name: "" }; + if (path.length === 0) return ret; + + const len = path.length; + let rootEnd = 0; + let code = StringPrototypeCharCodeAt(path, 0); + + if (len === 1) { + if (isPathSeparator(code)) { + // `path` contains just a path separator, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + ret.base = ret.name = path; + return ret; + } + // Try to match a root + if (isPathSeparator(code)) { + // Possible UNC root + + rootEnd = 1; + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! 
+ last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len) { + // We matched a UNC root only + rootEnd = j; + } else if (j !== last) { + // We matched a UNC root with leftovers + rootEnd = j + 1; + } + } + } + } + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + // Possible device root + if (len <= 2) { + // `path` contains just a drive root, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + rootEnd = 2; + if (isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + if (len === 3) { + // `path` contains just a drive root, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + rootEnd = 3; + } + } + if (rootEnd > 0) ret.root = StringPrototypeSlice(path, 0, rootEnd); + + let startDot = -1; + let startPart = rootEnd; + let end = -1; + let matchedSlash = true; + let i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Get non-dir info + for (; i >= rootEnd; --i) { + code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState 
= -1; + } + } + + if (end !== -1) { + if ( + startDot === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && + startDot === end - 1 && + startDot === startPart + 1) + ) { + ret.base = ret.name = StringPrototypeSlice(path, startPart, end); + } else { + ret.name = StringPrototypeSlice(path, startPart, startDot); + ret.base = StringPrototypeSlice(path, startPart, end); + ret.ext = StringPrototypeSlice(path, startDot, end); + } + } + + // If the directory is the root, use the entire root as the `dir` including + // the trailing slash if any (`C:\abc` -> `C:\`). Otherwise, strip out the + // trailing slash (`C:\abc\def` -> `C:\abc`). + if (startPart > 0 && startPart !== rootEnd) + ret.dir = StringPrototypeSlice(path, 0, startPart - 1); + else ret.dir = ret.root; + + return ret; + }, + + matchesGlob(path, pattern) { + return lazyMatchGlobPattern()(path, pattern, true); + }, + + sep: "\\", + delimiter: ";", + win32: null, + posix: null, +}; + +const posixCwd = (() => { + if (isWindows) { + // Converts Windows' backslash path separators to POSIX forward slashes + // and truncates any drive indicator + const regexp = /\\/g; + return () => { + const cwd = StringPrototypeReplace(process.cwd(), regexp, "/"); + return StringPrototypeSlice(cwd, StringPrototypeIndexOf(cwd, "/")); + }; + } + + // We're already on POSIX, no need for any transformations + return () => process.cwd(); +})(); + +const posix = { + /** + * path.resolve([from ...], to) + * @param {...string} args + * @returns {string} + */ + resolve(...args) { + if ( + args.length === 0 || + (args.length === 1 && (args[0] === "" || args[0] === ".")) + ) { + const cwd = posixCwd(); + if (StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH) { + return cwd; + } + } + let resolvedPath = ""; + let resolvedAbsolute = false; + + for (let i = args.length - 1; i >= 0 && !resolvedAbsolute; i--) { + const 
path = args[i]; + validateString(path, `paths[${i}]`); + + // Skip empty entries + if (path.length === 0) { + continue; + } + + resolvedPath = `${path}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + } + + if (!resolvedAbsolute) { + const cwd = posixCwd(); + resolvedPath = `${cwd}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeString( + resolvedPath, + !resolvedAbsolute, + "/", + isPosixPathSeparator + ); + + if (resolvedAbsolute) { + return `/${resolvedPath}`; + } + return resolvedPath.length > 0 ? resolvedPath : "."; + }, + + /** + * @param {string} path + * @returns {string} + */ + normalize(path) { + validateString(path, "path"); + + if (path.length === 0) return "."; + + const isAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + const trailingSeparator = + StringPrototypeCharCodeAt(path, path.length - 1) === CHAR_FORWARD_SLASH; + + // Normalize the path + path = normalizeString(path, !isAbsolute, "/", isPosixPathSeparator); + + if (path.length === 0) { + if (isAbsolute) return "/"; + return trailingSeparator ? "./" : "."; + } + if (trailingSeparator) path += "/"; + + return isAbsolute ? 
`/${path}` : path; + }, + + /** + * @param {string} path + * @returns {boolean} + */ + isAbsolute(path) { + validateString(path, "path"); + return ( + path.length > 0 && + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH + ); + }, + + /** + * @param {...string} args + * @returns {string} + */ + join(...args) { + if (args.length === 0) return "."; + + const path = []; + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + validateString(arg, "path"); + if (arg.length > 0) { + path.push(arg); + } + } + + if (path.length === 0) return "."; + + return posix.normalize(ArrayPrototypeJoin(path, "/")); + }, + + /** + * @param {string} from + * @param {string} to + * @returns {string} + */ + relative(from, to) { + validateString(from, "from"); + validateString(to, "to"); + + if (from === to) return ""; + + // Trim leading forward slashes. + from = posix.resolve(from); + to = posix.resolve(to); + + if (from === to) return ""; + + const fromStart = 1; + const fromEnd = from.length; + const fromLen = fromEnd - fromStart; + const toStart = 1; + const toLen = to.length - toStart; + + // Compare paths to find the longest common path from root + const length = fromLen < toLen ? fromLen : toLen; + let lastCommonSep = -1; + let i = 0; + for (; i < length; i++) { + const fromCode = StringPrototypeCharCodeAt(from, fromStart + i); + if (fromCode !== StringPrototypeCharCodeAt(to, toStart + i)) break; + else if (fromCode === CHAR_FORWARD_SLASH) lastCommonSep = i; + } + if (i === length) { + if (toLen > length) { + if (StringPrototypeCharCodeAt(to, toStart + i) === CHAR_FORWARD_SLASH) { + // We get here if `from` is the exact base path for `to`. 
+ // For example: from='/foo/bar'; to='/foo/bar/baz' + return StringPrototypeSlice(to, toStart + i + 1); + } + if (i === 0) { + // We get here if `from` is the root + // For example: from='/'; to='/foo' + return StringPrototypeSlice(to, toStart + i); + } + } else if (fromLen > length) { + if ( + StringPrototypeCharCodeAt(from, fromStart + i) === CHAR_FORWARD_SLASH + ) { + // We get here if `to` is the exact base path for `from`. + // For example: from='/foo/bar/baz'; to='/foo/bar' + lastCommonSep = i; + } else if (i === 0) { + // We get here if `to` is the root. + // For example: from='/foo/bar'; to='/' + lastCommonSep = 0; + } + } + } + + let out = ""; + // Generate the relative path based on the path difference between `to` + // and `from`. + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if ( + i === fromEnd || + StringPrototypeCharCodeAt(from, i) === CHAR_FORWARD_SLASH + ) { + out += out.length === 0 ? ".." : "/.."; + } + } + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts. + return `${out}${StringPrototypeSlice(to, toStart + lastCommonSep)}`; + }, + + /** + * @param {string} path + * @returns {string} + */ + toNamespacedPath(path) { + // Non-op on posix systems + return path; + }, + + /** + * @param {string} path + * @returns {string} + */ + dirname(path) { + validateString(path, "path"); + if (path.length === 0) return "."; + const hasRoot = StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + let end = -1; + let matchedSlash = true; + for (let i = path.length - 1; i >= 1; --i) { + if (StringPrototypeCharCodeAt(path, i) === CHAR_FORWARD_SLASH) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) return hasRoot ? 
"/" : "."; + if (hasRoot && end === 1) return "//"; + return StringPrototypeSlice(path, 0, end); + }, + + /** + * @param {string} path + * @param {string} [suffix] + * @returns {string} + */ + basename(path, suffix) { + if (suffix !== undefined) validateString(suffix, "suffix"); + validateString(path, "path"); + + let start = 0; + let end = -1; + let matchedSlash = true; + + if ( + suffix !== undefined && + suffix.length > 0 && + suffix.length <= path.length + ) { + if (suffix === path) return ""; + let extIdx = suffix.length - 1; + let firstNonSlashEnd = -1; + for (let i = path.length - 1; i >= 0; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (code === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === StringPrototypeCharCodeAt(suffix, extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd; + else if (end === -1) end = path.length; + return StringPrototypeSlice(path, start, end); + } + for (let i = path.length - 1; i >= 0; --i) { + if (StringPrototypeCharCodeAt(path, i) === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark 
this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ""; + return StringPrototypeSlice(path, start, end); + }, + + /** + * @param {string} path + * @returns {string} + */ + extname(path) { + validateString(path, "path"); + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + for (let i = path.length - 1; i >= 0; --i) { + const char = path[i]; + if (char === "/") { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (char === ".") { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if ( + startDot === -1 || + end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' 
+ (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return ""; + } + return StringPrototypeSlice(path, startDot, end); + }, + + format: FunctionPrototypeBind(_format, null, "/"), + + /** + * @param {string} path + * @returns {{ + * dir: string; + * root: string; + * base: string; + * name: string; + * ext: string; + * }} + */ + parse(path) { + validateString(path, "path"); + + const ret = { root: "", dir: "", base: "", ext: "", name: "" }; + if (path.length === 0) return ret; + const isAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + let start; + if (isAbsolute) { + ret.root = "/"; + start = 1; + } else { + start = 0; + } + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + let i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Get non-dir info + for (; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (code === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (end !== -1) { + const start = startPart === 0 && isAbsolute ? 
1 : startPart; + if ( + startDot === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && + startDot === end - 1 && + startDot === startPart + 1) + ) { + ret.base = ret.name = StringPrototypeSlice(path, start, end); + } else { + ret.name = StringPrototypeSlice(path, start, startDot); + ret.base = StringPrototypeSlice(path, start, end); + ret.ext = StringPrototypeSlice(path, startDot, end); + } + } + + if (startPart > 0) ret.dir = StringPrototypeSlice(path, 0, startPart - 1); + else if (isAbsolute) ret.dir = "/"; + + return ret; + }, + + matchesGlob(path, pattern) { + return lazyMatchGlobPattern()(path, pattern, false); + }, + + sep: "/", + delimiter: ":", + win32: null, + posix: null, +}; + +posix.win32 = win32.win32 = win32; +posix.posix = win32.posix = posix; + +// Legacy internal API, docs-only deprecated: DEP0080 +win32._makeLong = win32.toNamespacedPath; +posix._makeLong = posix.toNamespacedPath; + +module.exports = isWindows ? 
win32 : posix; diff --git a/node/perf_hooks.js b/node/perf_hooks.js new file mode 100644 index 00000000..3abbec84 --- /dev/null +++ b/node/perf_hooks.js @@ -0,0 +1,47 @@ +"use strict"; + +const { ObjectDefineProperty } = primordials; + +const { constants } = internalBinding("performance"); + +const { PerformanceEntry } = require("internal/perf/performance_entry"); +const { PerformanceResourceTiming } = require("internal/perf/resource_timing"); +const { + PerformanceObserver, + PerformanceObserverEntryList, +} = require("internal/perf/observe"); +const { + PerformanceMark, + PerformanceMeasure, +} = require("internal/perf/usertiming"); +const { Performance, performance } = require("internal/perf/performance"); + +const { createHistogram } = require("internal/histogram"); + +const monitorEventLoopDelay = require("internal/perf/event_loop_delay"); +const { + eventLoopUtilization, +} = require("internal/perf/event_loop_utilization"); +const timerify = require("internal/perf/timerify"); + +module.exports = { + Performance, + PerformanceEntry, + PerformanceMark, + PerformanceMeasure, + PerformanceObserver, + PerformanceObserverEntryList, + PerformanceResourceTiming, + monitorEventLoopDelay, + eventLoopUtilization, + timerify, + createHistogram, + performance, +}; + +ObjectDefineProperty(module.exports, "constants", { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, +}); diff --git a/node/process.js b/node/process.js new file mode 100644 index 00000000..bdfab0ab --- /dev/null +++ b/node/process.js @@ -0,0 +1,4 @@ +"use strict"; + +// Re-export process as a built-in module +module.exports = process; diff --git a/node/querystring.js b/node/querystring.js new file mode 100644 index 00000000..2f705b59 --- /dev/null +++ b/node/querystring.js @@ -0,0 +1,790 @@ +"use strict"; + +const { + Array, + ArrayIsArray, + Int8Array, + MathAbs, + NumberIsFinite, + ObjectKeys, + String, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + 
decodeURIComponent, +} = primordials; + +const { Buffer } = require("buffer"); +const { encodeStr, hexTable, isHexTable } = require("internal/querystring"); +const QueryString = (module.exports = { + unescapeBuffer, + // `unescape()` is a JS global, so we need to use a different local name + unescape: qsUnescape, + + // `escape()` is a JS global, so we need to use a different local name + escape: qsEscape, + + stringify, + encode: stringify, + + parse, + decode: parse, +}); + +const unhexTable = new Int8Array([ + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 0 - 15 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 16 - 31 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 32 - 47 + +0, + +1, + +2, + +3, + +4, + +5, + +6, + +7, + +8, + +9, + -1, + -1, + -1, + -1, + -1, + -1, // 48 - 63 + -1, + 10, + 11, + 12, + 13, + 14, + 15, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 64 - 79 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 80 - 95 + -1, + 10, + 11, + 12, + 13, + 14, + 15, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 96 - 111 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 112 - 127 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 128 ... 
+ -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // ... 255 +]); +/** + * A safe fast alternative to decodeURIComponent + * @param {string} s + * @param {boolean} decodeSpaces + * @returns {string} + */ +function unescapeBuffer(s, decodeSpaces) { + const out = Buffer.allocUnsafe(s.length); + let index = 0; + let outIndex = 0; + let currentChar; + let nextChar; + let hexHigh; + let hexLow; + const maxLength = s.length - 2; + // Flag to know if some hex chars have been decoded + let hasHex = false; + while (index < s.length) { + currentChar = StringPrototypeCharCodeAt(s, index); + if (currentChar === 43 /* '+' */ && decodeSpaces) { + out[outIndex++] = 32; // ' ' + index++; + continue; + } + if (currentChar === 37 /* '%' */ && index < maxLength) { + currentChar = StringPrototypeCharCodeAt(s, ++index); + hexHigh = unhexTable[currentChar]; + if (!(hexHigh >= 0)) { + out[outIndex++] = 37; // '%' + continue; + } else { + nextChar = StringPrototypeCharCodeAt(s, ++index); + hexLow = unhexTable[nextChar]; + if (!(hexLow >= 0)) { + out[outIndex++] = 37; // '%' + index--; + } else { + hasHex = true; + currentChar = hexHigh * 16 + hexLow; + } + } + } + out[outIndex++] = currentChar; + index++; + } + return hasHex ? 
out.slice(0, outIndex) : out; +} + +/** + * @param {string} s + * @param {boolean} decodeSpaces + * @returns {string} + */ +function qsUnescape(s, decodeSpaces) { + try { + return decodeURIComponent(s); + } catch { + return QueryString.unescapeBuffer(s, decodeSpaces).toString(); + } +} + +// These characters do not need escaping when generating query strings: +// ! - . _ ~ +// ' ( ) * +// digits +// alpha (uppercase) +// alpha (lowercase) +const noEscape = new Int8Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, // 0 - 15 + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, // 16 - 31 + 0, + 1, + 0, + 0, + 0, + 0, + 0, + 1, + 1, + 1, + 1, + 0, + 0, + 1, + 1, + 0, // 32 - 47 + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 0, + 0, + 0, + 0, + 0, + 0, // 48 - 63 + 0, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, // 64 - 79 + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 0, + 0, + 0, + 0, + 1, // 80 - 95 + 0, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, // 96 - 111 + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 0, + 0, + 0, + 1, + 0, // 112 - 127 +]); + +/** + * QueryString.escape() replaces encodeURIComponent() + * @see https://www.ecma-international.org/ecma-262/5.1/#sec-15.1.3.4 + * @param {any} str + * @returns {string} + */ +function qsEscape(str) { + if (typeof str !== "string") { + if (typeof str === "object") str = String(str); + else str += ""; + } + + return encodeStr(str, noEscape, hexTable); +} + +/** + * @param {string | number | bigint | boolean | symbol | undefined | null} v + * @returns {string} + */ +function stringifyPrimitive(v) { + if (typeof v === "string") return v; + if (typeof v === "number" && NumberIsFinite(v)) return "" + v; + if (typeof v === "bigint") return "" + v; + if (typeof v === "boolean") return v ? 
"true" : "false"; + return ""; +} + +/** + * @param {string | number | bigint | boolean} v + * @param {(v: string) => string} encode + * @returns {string} + */ +function encodeStringified(v, encode) { + if (typeof v === "string") return v.length ? encode(v) : ""; + if (typeof v === "number" && NumberIsFinite(v)) { + // Values >= 1e21 automatically switch to scientific notation which requires + // escaping due to the inclusion of a '+' in the output + return MathAbs(v) < 1e21 ? "" + v : encode("" + v); + } + if (typeof v === "bigint") return "" + v; + if (typeof v === "boolean") return v ? "true" : "false"; + return ""; +} + +/** + * @param {string | number | boolean | null} v + * @param {(v: string) => string} encode + * @returns {string} + */ +function encodeStringifiedCustom(v, encode) { + return encode(stringifyPrimitive(v)); +} + +/** + * @param {Record | null>} obj + * @param {string} [sep] + * @param {string} [eq] + * @param {{ encodeURIComponent?: (v: string) => string }} [options] + * @returns {string} + */ +function stringify(obj, sep, eq, options) { + sep ||= "&"; + eq ||= "="; + + let encode = QueryString.escape; + if (options && typeof options.encodeURIComponent === "function") { + encode = options.encodeURIComponent; + } + const convert = + encode === qsEscape ? 
encodeStringified : encodeStringifiedCustom; + + if (obj !== null && typeof obj === "object") { + const keys = ObjectKeys(obj); + const len = keys.length; + let fields = ""; + for (let i = 0; i < len; ++i) { + const k = keys[i]; + const v = obj[k]; + let ks = convert(k, encode); + ks += eq; + + if (ArrayIsArray(v)) { + const vlen = v.length; + if (vlen === 0) continue; + if (fields) fields += sep; + for (let j = 0; j < vlen; ++j) { + if (j) fields += sep; + fields += ks; + fields += convert(v[j], encode); + } + } else { + if (fields) fields += sep; + fields += ks; + fields += convert(v, encode); + } + } + return fields; + } + return ""; +} + +/** + * @param {string} str + * @returns {number[]} + */ +function charCodes(str) { + if (str.length === 0) return []; + if (str.length === 1) return [StringPrototypeCharCodeAt(str, 0)]; + const ret = new Array(str.length); + for (let i = 0; i < str.length; ++i) + ret[i] = StringPrototypeCharCodeAt(str, i); + return ret; +} +const defSepCodes = [38]; // & +const defEqCodes = [61]; // = + +function addKeyVal(obj, key, value, keyEncoded, valEncoded, decode) { + if (key.length > 0 && keyEncoded) key = decodeStr(key, decode); + if (value.length > 0 && valEncoded) value = decodeStr(value, decode); + + if (obj[key] === undefined) { + obj[key] = value; + } else { + const curValue = obj[key]; + // A simple Array-specific property check is enough here to + // distinguish from a string value and is faster and still safe + // since we are generating all of the values being assigned. + if (curValue.pop) curValue[curValue.length] = value; + else obj[key] = [curValue, value]; + } +} + +/** + * Parse a key/val string. 
+ * @param {string} qs + * @param {string} sep + * @param {string} eq + * @param {{ + * maxKeys?: number, + * decodeURIComponent?: (v: string) => string, + * }} [options] + * @returns {Record} + */ +function parse(qs, sep, eq, options) { + const obj = { __proto__: null }; + + if (typeof qs !== "string" || qs.length === 0) { + return obj; + } + + const sepCodes = !sep ? defSepCodes : charCodes(String(sep)); + const eqCodes = !eq ? defEqCodes : charCodes(String(eq)); + const sepLen = sepCodes.length; + const eqLen = eqCodes.length; + + let pairs = 1000; + if (options && typeof options.maxKeys === "number") { + // -1 is used in place of a value like Infinity for meaning + // "unlimited pairs" because of additional checks V8 (at least as of v5.4) + // has to do when using variables that contain values like Infinity. Since + // `pairs` is always decremented and checked explicitly for 0, -1 works + // effectively the same as Infinity, while providing a significant + // performance boost. + pairs = options.maxKeys > 0 ? options.maxKeys : -1; + } + + let decode = QueryString.unescape; + if (options && typeof options.decodeURIComponent === "function") { + decode = options.decodeURIComponent; + } + const customDecode = decode !== qsUnescape; + + let lastPos = 0; + let sepIdx = 0; + let eqIdx = 0; + let key = ""; + let value = ""; + let keyEncoded = customDecode; + let valEncoded = customDecode; + const plusChar = customDecode ? "%20" : " "; + let encodeCheck = 0; + for (let i = 0; i < qs.length; ++i) { + const code = StringPrototypeCharCodeAt(qs, i); + + // Try matching key/value pair separator (e.g. '&') + if (code === sepCodes[sepIdx]) { + if (++sepIdx === sepLen) { + // Key/value pair separator match! 
+ const end = i - sepIdx + 1; + if (eqIdx < eqLen) { + // We didn't find the (entire) key/value separator + if (lastPos < end) { + // Treat the substring as part of the key instead of the value + key += StringPrototypeSlice(qs, lastPos, end); + } else if (key.length === 0) { + // We saw an empty substring between separators + if (--pairs === 0) return obj; + lastPos = i + 1; + sepIdx = eqIdx = 0; + continue; + } + } else if (lastPos < end) { + value += StringPrototypeSlice(qs, lastPos, end); + } + + addKeyVal(obj, key, value, keyEncoded, valEncoded, decode); + + if (--pairs === 0) return obj; + keyEncoded = valEncoded = customDecode; + key = value = ""; + encodeCheck = 0; + lastPos = i + 1; + sepIdx = eqIdx = 0; + } + } else { + sepIdx = 0; + // Try matching key/value separator (e.g. '=') if we haven't already + if (eqIdx < eqLen) { + if (code === eqCodes[eqIdx]) { + if (++eqIdx === eqLen) { + // Key/value separator match! + const end = i - eqIdx + 1; + if (lastPos < end) key += StringPrototypeSlice(qs, lastPos, end); + encodeCheck = 0; + lastPos = i + 1; + } + continue; + } else { + eqIdx = 0; + if (!keyEncoded) { + // Try to match an (valid) encoded byte once to minimize unnecessary + // calls to string decoding functions + if (code === 37 /* % */) { + encodeCheck = 1; + continue; + } else if (encodeCheck > 0) { + if (isHexTable[code] === 1) { + if (++encodeCheck === 3) keyEncoded = true; + continue; + } else { + encodeCheck = 0; + } + } + } + } + if (code === 43 /* + */) { + if (lastPos < i) key += StringPrototypeSlice(qs, lastPos, i); + key += plusChar; + lastPos = i + 1; + continue; + } + } + if (code === 43 /* + */) { + if (lastPos < i) value += StringPrototypeSlice(qs, lastPos, i); + value += plusChar; + lastPos = i + 1; + } else if (!valEncoded) { + // Try to match an (valid) encoded byte (once) to minimize unnecessary + // calls to string decoding functions + if (code === 37 /* % */) { + encodeCheck = 1; + } else if (encodeCheck > 0) { + if 
(isHexTable[code] === 1) { + if (++encodeCheck === 3) valEncoded = true; + } else { + encodeCheck = 0; + } + } + } + } + } + + // Deal with any leftover key or value data + if (lastPos < qs.length) { + if (eqIdx < eqLen) key += StringPrototypeSlice(qs, lastPos); + else if (sepIdx < sepLen) value += StringPrototypeSlice(qs, lastPos); + } else if (eqIdx === 0 && key.length === 0) { + // We ended on an empty substring + return obj; + } + + addKeyVal(obj, key, value, keyEncoded, valEncoded, decode); + + return obj; +} + +/** + * V8 does not optimize functions with try-catch blocks, so we isolate them here + * to minimize the damage (Note: no longer true as of V8 5.4 -- but still will + * not be inlined). + * @param {string} s + * @param {(v: string) => string} decoder + * @returns {string} + */ +function decodeStr(s, decoder) { + try { + return decoder(s); + } catch { + return QueryString.unescape(s, true); + } +} diff --git a/node/readline.js b/node/readline.js new file mode 100644 index 00000000..fbe177f4 --- /dev/null +++ b/node/readline.js @@ -0,0 +1,514 @@ +"use strict"; + +const { + DateNow, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + ObjectSetPrototypeOf, + Promise, + PromiseReject, + StringPrototypeSlice, + SymbolDispose, +} = primordials; + +const { + clearLine, + clearScreenDown, + cursorTo, + moveCursor, +} = require("internal/readline/callbacks"); +const emitKeypressEvents = require("internal/readline/emitKeypressEvents"); +const promises = require("readline/promises"); + +const { AbortError } = require("internal/errors"); +const { inspect } = require("internal/util/inspect"); +const { kEmptyObject, promisify } = require("internal/util"); +const { validateAbortSignal } = require("internal/validators"); + +/** + * @typedef {import('./stream.js').Readable} Readable + * @typedef {import('./stream.js').Writable} Writable + */ + +const { + Interface: _Interface, + InterfaceConstructor, + kAddHistory, + kDecoder, + kDeleteLeft, + 
kDeleteLineLeft, + kDeleteLineRight, + kDeleteRight, + kDeleteWordLeft, + kDeleteWordRight, + kGetDisplayPos, + kHistoryNext, + kHistoryPrev, + kInsertString, + kLine, + kLine_buffer, + kMoveCursor, + kNormalWrite, + kOldPrompt, + kOnLine, + kPreviousKey, + kPrompt, + kQuestion, + kQuestionCallback, + kQuestionCancel, + kRefreshLine, + kSawKeyPress, + kSawReturnAt, + kSetRawMode, + kTabComplete, + kTabCompleter, + kTtyWrite, + kWordLeft, + kWordRight, + kWriteToOutput, +} = require("internal/readline/interface"); +let addAbortListener; + +function Interface(input, output, completer, terminal) { + if (!(this instanceof Interface)) { + return new Interface(input, output, completer, terminal); + } + + if ( + input?.input && + typeof input.completer === "function" && + input.completer.length !== 2 + ) { + const { completer } = input; + input.completer = (v, cb) => cb(null, completer(v)); + } else if (typeof completer === "function" && completer.length !== 2) { + const realCompleter = completer; + completer = (v, cb) => cb(null, realCompleter(v)); + } + + FunctionPrototypeCall( + InterfaceConstructor, + this, + input, + output, + completer, + terminal + ); + + if (process.env.TERM === "dumb") { + this._ttyWrite = FunctionPrototypeBind(_ttyWriteDumb, this); + } +} + +ObjectSetPrototypeOf(Interface.prototype, _Interface.prototype); +ObjectSetPrototypeOf(Interface, _Interface); + +/** + * Displays `query` by writing it to the `output`. + * @param {string} query + * @param {{ signal?: AbortSignal; }} [options] + * @param {Function} cb + * @returns {void} + */ +Interface.prototype.question = function question(query, options, cb) { + cb = typeof options === "function" ? 
options : cb; + if (options === null || typeof options !== "object") { + options = kEmptyObject; + } + + if (options.signal) { + validateAbortSignal(options.signal, "options.signal"); + if (options.signal.aborted) { + return; + } + + const onAbort = () => { + this[kQuestionCancel](); + }; + addAbortListener ??= + require("internal/events/abort_listener").addAbortListener; + const disposable = addAbortListener(options.signal, onAbort); + const originalCb = cb; + cb = + typeof cb === "function" + ? (answer) => { + disposable[SymbolDispose](); + return originalCb(answer); + } + : disposable[SymbolDispose]; + } + + if (typeof cb === "function") { + this[kQuestion](query, cb); + } +}; +Interface.prototype.question[promisify.custom] = function question( + query, + options +) { + if (options === null || typeof options !== "object") { + options = kEmptyObject; + } + + if (options.signal?.aborted) { + return PromiseReject( + new AbortError(undefined, { cause: options.signal.reason }) + ); + } + + return new Promise((resolve, reject) => { + let cb = resolve; + + if (options.signal) { + const onAbort = () => { + reject(new AbortError(undefined, { cause: options.signal.reason })); + }; + addAbortListener ??= + require("internal/events/abort_listener").addAbortListener; + const disposable = addAbortListener(options.signal, onAbort); + cb = (answer) => { + disposable[SymbolDispose](); + resolve(answer); + }; + } + + this.question(query, options, cb); + }); +}; + +/** + * Creates a new `readline.Interface` instance. 
+ * @param {Readable | { + * input: Readable; + * output: Writable; + * completer?: Function; + * terminal?: boolean; + * history?: string[]; + * historySize?: number; + * removeHistoryDuplicates?: boolean; + * prompt?: string; + * crlfDelay?: number; + * escapeCodeTimeout?: number; + * tabSize?: number; + * signal?: AbortSignal; + * }} input + * @param {Writable} [output] + * @param {Function} [completer] + * @param {boolean} [terminal] + * @returns {Interface} + */ +function createInterface(input, output, completer, terminal) { + return new Interface(input, output, completer, terminal); +} + +ObjectDefineProperties(Interface.prototype, { + // Redirect internal prototype methods to the underscore notation for backward + // compatibility. + [kSetRawMode]: { + __proto__: null, + get() { + return this._setRawMode; + }, + }, + [kOnLine]: { + __proto__: null, + get() { + return this._onLine; + }, + }, + [kWriteToOutput]: { + __proto__: null, + get() { + return this._writeToOutput; + }, + }, + [kAddHistory]: { + __proto__: null, + get() { + return this._addHistory; + }, + }, + [kRefreshLine]: { + __proto__: null, + get() { + return this._refreshLine; + }, + }, + [kNormalWrite]: { + __proto__: null, + get() { + return this._normalWrite; + }, + }, + [kInsertString]: { + __proto__: null, + get() { + return this._insertString; + }, + }, + [kTabComplete]: { + __proto__: null, + get() { + return this._tabComplete; + }, + }, + [kWordLeft]: { + __proto__: null, + get() { + return this._wordLeft; + }, + }, + [kWordRight]: { + __proto__: null, + get() { + return this._wordRight; + }, + }, + [kDeleteLeft]: { + __proto__: null, + get() { + return this._deleteLeft; + }, + }, + [kDeleteRight]: { + __proto__: null, + get() { + return this._deleteRight; + }, + }, + [kDeleteWordLeft]: { + __proto__: null, + get() { + return this._deleteWordLeft; + }, + }, + [kDeleteWordRight]: { + __proto__: null, + get() { + return this._deleteWordRight; + }, + }, + [kDeleteLineLeft]: { + __proto__: 
null, + get() { + return this._deleteLineLeft; + }, + }, + [kDeleteLineRight]: { + __proto__: null, + get() { + return this._deleteLineRight; + }, + }, + [kLine]: { + __proto__: null, + get() { + return this._line; + }, + }, + [kHistoryNext]: { + __proto__: null, + get() { + return this._historyNext; + }, + }, + [kHistoryPrev]: { + __proto__: null, + get() { + return this._historyPrev; + }, + }, + [kGetDisplayPos]: { + __proto__: null, + get() { + return this._getDisplayPos; + }, + }, + [kMoveCursor]: { + __proto__: null, + get() { + return this._moveCursor; + }, + }, + [kTtyWrite]: { + __proto__: null, + get() { + return this._ttyWrite; + }, + }, + + // Defining proxies for the internal instance properties for backward + // compatibility. + _decoder: { + __proto__: null, + get() { + return this[kDecoder]; + }, + set(value) { + this[kDecoder] = value; + }, + }, + _line_buffer: { + __proto__: null, + get() { + return this[kLine_buffer]; + }, + set(value) { + this[kLine_buffer] = value; + }, + }, + _oldPrompt: { + __proto__: null, + get() { + return this[kOldPrompt]; + }, + set(value) { + this[kOldPrompt] = value; + }, + }, + _previousKey: { + __proto__: null, + get() { + return this[kPreviousKey]; + }, + set(value) { + this[kPreviousKey] = value; + }, + }, + _prompt: { + __proto__: null, + get() { + return this[kPrompt]; + }, + set(value) { + this[kPrompt] = value; + }, + }, + _questionCallback: { + __proto__: null, + get() { + return this[kQuestionCallback]; + }, + set(value) { + this[kQuestionCallback] = value; + }, + }, + _sawKeyPress: { + __proto__: null, + get() { + return this[kSawKeyPress]; + }, + set(value) { + this[kSawKeyPress] = value; + }, + }, + _sawReturnAt: { + __proto__: null, + get() { + return this[kSawReturnAt]; + }, + set(value) { + this[kSawReturnAt] = value; + }, + }, +}); + +// Make internal methods public for backward compatibility. 
+Interface.prototype._setRawMode = _Interface.prototype[kSetRawMode]; +Interface.prototype._onLine = _Interface.prototype[kOnLine]; +Interface.prototype._writeToOutput = _Interface.prototype[kWriteToOutput]; +Interface.prototype._addHistory = _Interface.prototype[kAddHistory]; +Interface.prototype._refreshLine = _Interface.prototype[kRefreshLine]; +Interface.prototype._normalWrite = _Interface.prototype[kNormalWrite]; +Interface.prototype._insertString = _Interface.prototype[kInsertString]; +Interface.prototype._tabComplete = function (lastKeypressWasTab) { + // Overriding parent method because `this.completer` in the legacy + // implementation takes a callback instead of being an async function. + this.pause(); + const string = StringPrototypeSlice(this.line, 0, this.cursor); + this.completer(string, (err, value) => { + this.resume(); + + if (err) { + this._writeToOutput(`Tab completion error: ${inspect(err)}`); + return; + } + + this[kTabCompleter](lastKeypressWasTab, value); + }); +}; +Interface.prototype._wordLeft = _Interface.prototype[kWordLeft]; +Interface.prototype._wordRight = _Interface.prototype[kWordRight]; +Interface.prototype._deleteLeft = _Interface.prototype[kDeleteLeft]; +Interface.prototype._deleteRight = _Interface.prototype[kDeleteRight]; +Interface.prototype._deleteWordLeft = _Interface.prototype[kDeleteWordLeft]; +Interface.prototype._deleteWordRight = _Interface.prototype[kDeleteWordRight]; +Interface.prototype._deleteLineLeft = _Interface.prototype[kDeleteLineLeft]; +Interface.prototype._deleteLineRight = _Interface.prototype[kDeleteLineRight]; +Interface.prototype._line = _Interface.prototype[kLine]; +Interface.prototype._historyNext = _Interface.prototype[kHistoryNext]; +Interface.prototype._historyPrev = _Interface.prototype[kHistoryPrev]; +Interface.prototype._getDisplayPos = _Interface.prototype[kGetDisplayPos]; +Interface.prototype._getCursorPos = _Interface.prototype.getCursorPos; +Interface.prototype._moveCursor = 
_Interface.prototype[kMoveCursor]; +Interface.prototype._ttyWrite = _Interface.prototype[kTtyWrite]; + +function _ttyWriteDumb(s, key) { + key ||= kEmptyObject; + if (key.name === "escape") return; + + if (this[kSawReturnAt] && key.name !== "enter") this[kSawReturnAt] = 0; + + if (key.ctrl) { + if (key.name === "c") { + if (this.listenerCount("SIGINT") > 0) { + this.emit("SIGINT"); + } else { + // This readline instance is finished + this.close(); + } + + return; + } else if (key.name === "d") { + this.close(); + return; + } + } + + switch (key.name) { + case "return": // Carriage return, i.e. \r + this[kSawReturnAt] = DateNow(); + this._line(); + break; + + case "enter": + // When key interval > crlfDelay + if ( + this[kSawReturnAt] === 0 || + DateNow() - this[kSawReturnAt] > this.crlfDelay + ) { + this._line(); + } + this[kSawReturnAt] = 0; + break; + + default: + if (typeof s === "string" && s) { + this.line += s; + this.cursor += s.length; + this._writeToOutput(s); + } + } +} + +module.exports = { + Interface, + clearLine, + clearScreenDown, + createInterface, + cursorTo, + emitKeypressEvents, + moveCursor, + promises, +}; diff --git a/node/repl.js b/node/repl.js new file mode 100644 index 00000000..9bad4999 --- /dev/null +++ b/node/repl.js @@ -0,0 +1,1488 @@ +"use strict"; + +const { + ArrayPrototypeAt, + ArrayPrototypeFilter, + ArrayPrototypeFindLastIndex, + ArrayPrototypeForEach, + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeSort, + Boolean, + Error: MainContextError, + FunctionPrototypeBind, + FunctionPrototypeCall, + JSONStringify, + MathMaxApply, + NumberIsNaN, + NumberParseFloat, + ObjectAssign, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertyNames, + ObjectKeys, + Promise, + ReflectApply, + RegExp, + RegExpPrototypeExec, + SafePromiseRace, + SafeSet, + SafeWeakSet, + StringPrototypeCharAt, + StringPrototypeEndsWith, + 
StringPrototypeIncludes, + StringPrototypeRepeat, + StringPrototypeSlice, + StringPrototypeStartsWith, + StringPrototypeTrim, + Symbol, + SyntaxError, + globalThis, +} = primordials; + +const { + makeRequireFunction, + addBuiltinLibsToObject, +} = require("internal/modules/helpers"); +const { parse: acornParse } = require("internal/deps/acorn/acorn/dist/acorn"); +const acornWalk = require("internal/deps/acorn/acorn-walk/dist/walk"); +const { + decorateErrorStack, + isError, + deprecate, + SideEffectFreeRegExpPrototypeSymbolReplace, + SideEffectFreeRegExpPrototypeSymbolSplit, +} = require("internal/util"); +const { inspect } = require("internal/util/inspect"); +const vm = require("vm"); + +const { runInThisContext, runInContext } = vm.Script.prototype; + +const path = require("path"); +const fs = require("fs"); +const { Interface } = require("readline"); +const { commonPrefix } = require("internal/readline/utils"); +const { Console } = require("console"); +const { shouldColorize } = require("internal/util/colors"); +const CJSModule = require("internal/modules/cjs/loader").Module; +const domain = require("domain"); +let debug = require("internal/util/debuglog").debuglog("repl", (fn) => { + debug = fn; +}); +const { + ErrorPrepareStackTrace, + codes: { + ERR_CANNOT_WATCH_SIGINT, + ERR_INVALID_REPL_EVAL_CONFIG, + ERR_INVALID_REPL_INPUT, + ERR_MISSING_ARGS, + ERR_SCRIPT_EXECUTION_INTERRUPTED, + }, + isErrorStackTraceLimitWritable, + overrideStackTrace, +} = require("internal/errors"); +const { sendInspectorCommand } = require("internal/util/inspector"); +const { getOptionValue } = require("internal/options"); +const { validateFunction, validateObject } = require("internal/validators"); +const experimentalREPLAwait = getOptionValue("--experimental-repl-await"); +const pendingDeprecation = getOptionValue("--pending-deprecation"); +const { + REPL_MODE_SLOPPY, + REPL_MODE_STRICT, + isRecoverableError, + kStandaloneREPL, + setupPreview, + setupReverseSearch, + 
isObjectLiteral, + isValidSyntax, + kContextId, + getREPLResourceName, + globalBuiltins, + getReplBuiltinLibs, + setReplBuiltinLibs, + fixReplRequire, +} = require("internal/repl/utils"); +const { complete } = require("internal/repl/completion"); +const { startSigintWatchdog, stopSigintWatchdog } = + internalBinding("contextify"); + +const { makeContextifyScript } = require("internal/vm"); +const { + kMultilinePrompt, + kAddNewLineOnTTY, + kLastCommandErrored, +} = require("internal/readline/interface"); + +// Lazy-loaded. +let processTopLevelAwait; + +const parentModule = module; +const domainSet = new SafeWeakSet(); + +const kBufferedCommandSymbol = Symbol("bufferedCommand"); +const kLoadingSymbol = Symbol("loading"); + +let addedNewListener = false; + +fixReplRequire(module); + +// This is the default "writer" value, if none is passed in the REPL options, +// and it can be overridden by custom print functions, such as `probe` or +// `eyes.js`. +const writer = (obj) => inspect(obj, writer.options); +writer.options = { ...inspect.defaultOptions, showProxy: true }; + +// Converts static import statement to dynamic import statement +const toDynamicImport = (codeLine) => { + let dynamicImportStatement = ""; + const ast = acornParse(codeLine, { + __proto__: null, + sourceType: "module", + ecmaVersion: "latest", + }); + acornWalk.ancestor(ast, { + ImportDeclaration(node) { + const awaitDynamicImport = `await import(${JSONStringify( + node.source.value + )});`; + if (node.specifiers.length === 0) { + dynamicImportStatement += awaitDynamicImport; + } else if ( + node.specifiers.length === 1 && + node.specifiers[0].type === "ImportNamespaceSpecifier" + ) { + dynamicImportStatement += `const ${node.specifiers[0].local.name} = ${awaitDynamicImport}`; + } else { + const importNames = ArrayPrototypeJoin( + ArrayPrototypeMap(node.specifiers, ({ local, imported }) => + local.name === imported?.name + ? local.name + : `${imported?.name ?? 
"default"}: ${local.name}` + ), + ", " + ); + dynamicImportStatement += `const { ${importNames} } = ${awaitDynamicImport}`; + } + }, + }); + return dynamicImportStatement; +}; + +class Recoverable extends SyntaxError { + constructor(err) { + super(); + this.err = err; + } +} + +class REPLServer extends Interface { + constructor(prompt, stream, eval_, useGlobal, ignoreUndefined, replMode) { + let options; + if (prompt !== null && typeof prompt === "object") { + // An options object was given. + options = { ...prompt }; + stream = options.stream || options.socket; + eval_ = options.eval; + useGlobal = options.useGlobal; + ignoreUndefined = options.ignoreUndefined; + prompt = options.prompt; + replMode = options.replMode; + } else { + options = {}; + } + + if (!options.input && !options.output) { + // Legacy API, passing a 'stream'/'socket' option. + // Use stdin and stdout as the default streams if none were given. + stream ||= process; + + // We're given a duplex readable/writable Stream, like a `net.Socket` + // or a custom object with 2 streams, or the `process` object. + options.input = stream.stdin || stream; + options.output = stream.stdout || stream; + } + + if (options.terminal === undefined) { + options.terminal = options.output.isTTY; + } + options.terminal = !!options.terminal; + + if (options.terminal && options.useColors === undefined) { + // If possible, check if stdout supports colors or not. + options.useColors = shouldColorize(options.output); + } + + const preview = + options.terminal && + (options.preview !== undefined ? !!options.preview : !eval_); + + super({ + input: options.input, + output: options.output, + completer: options.completer || completer, + terminal: options.terminal, + historySize: options.historySize, + prompt, + }); + + ObjectDefineProperty(this, "inputStream", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => this.input, + "repl.inputStream and repl.outputStream are deprecated. 
" + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : () => this.input, + set: pendingDeprecation + ? deprecate( + (val) => (this.input = val), + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : (val) => (this.input = val), + enumerable: false, + configurable: true, + }); + ObjectDefineProperty(this, "outputStream", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => this.output, + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : () => this.output, + set: pendingDeprecation + ? deprecate( + (val) => (this.output = val), + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : (val) => (this.output = val), + enumerable: false, + configurable: true, + }); + + this.allowBlockingCompletions = !!options.allowBlockingCompletions; + this.useColors = !!options.useColors; + this._domain = options.domain || domain.create(); + this.useGlobal = !!useGlobal; + this.ignoreUndefined = !!ignoreUndefined; + this.replMode = replMode || module.exports.REPL_MODE_SLOPPY; + this.underscoreAssigned = false; + this.last = undefined; + this.underscoreErrAssigned = false; + this.lastError = undefined; + this.breakEvalOnSigint = !!options.breakEvalOnSigint; + this.editorMode = false; + // Context id for use with the inspector protocol. + this[kContextId] = undefined; + this[kLastCommandErrored] = false; + + if (this.breakEvalOnSigint && eval_) { + // Allowing this would not reflect user expectations. + // breakEvalOnSigint affects only the behavior of the default eval(). + throw new ERR_INVALID_REPL_EVAL_CONFIG(); + } + + if (options[kStandaloneREPL]) { + // It is possible to introspect the running REPL accessing this variable + // from inside the REPL. This is useful for anyone working on the REPL. 
+ module.exports.repl = this; + } else if (!addedNewListener) { + // Add this listener only once and use a WeakSet that contains the REPLs + // domains. Otherwise we'd have to add a single listener to each REPL + // instance and that could trigger the `MaxListenersExceededWarning`. + process.prependListener("newListener", (event, listener) => { + if ( + event === "uncaughtException" && + process.domain && + listener.name !== "domainUncaughtExceptionClear" && + domainSet.has(process.domain) + ) { + // Throw an error so that the event will not be added and the current + // domain takes over. That way the user is notified about the error + // and the current code evaluation is stopped, just as any other code + // that contains an error. + throw new ERR_INVALID_REPL_INPUT( + "Listeners for `uncaughtException` cannot be used in the REPL" + ); + } + }); + addedNewListener = true; + } + + domainSet.add(this._domain); + + const savedRegExMatches = ["", "", "", "", "", "", "", "", "", ""]; + const sep = "\u0000\u0000\u0000"; + const regExMatcher = new RegExp( + `^${sep}(.*)${sep}(.*)${sep}(.*)${sep}(.*)` + + `${sep}(.*)${sep}(.*)${sep}(.*)${sep}(.*)` + + `${sep}(.*)$` + ); + + eval_ ||= defaultEval; + + const self = this; + + // Pause taking in new input, and store the keys in a buffer. 
+ const pausedBuffer = []; + let paused = false; + function pause() { + paused = true; + } + + function unpause() { + if (!paused) return; + paused = false; + let entry; + const tmpCompletionEnabled = self.isCompletionEnabled; + while ((entry = ArrayPrototypeShift(pausedBuffer)) !== undefined) { + const { 0: type, 1: payload, 2: isCompletionEnabled } = entry; + switch (type) { + case "key": { + const { 0: d, 1: key } = payload; + self.isCompletionEnabled = isCompletionEnabled; + self._ttyWrite(d, key); + break; + } + case "close": + self.emit("exit"); + break; + } + if (paused) { + break; + } + } + self.isCompletionEnabled = tmpCompletionEnabled; + } + + function defaultEval(code, context, file, cb) { + let result, script, wrappedErr; + let err = null; + let wrappedCmd = false; + let awaitPromise = false; + const input = code; + + if (isObjectLiteral(code) && isValidSyntax(code)) { + // Add parentheses to make sure `code` is parsed as an expression + code = `(${StringPrototypeTrim(code)})\n`; + wrappedCmd = true; + } + + const hostDefinedOptionId = Symbol(`eval:${file}`); + let parentURL; + try { + const { pathToFileURL } = require("internal/url"); + // Adding `/repl` prevents dynamic imports from loading relative + // to the parent of `process.cwd()`. + parentURL = pathToFileURL(path.join(process.cwd(), "repl")).href; + } catch { + // Continue regardless of error. + } + async function importModuleDynamically( + specifier, + _, + importAttributes, + phase + ) { + const cascadedLoader = + require("internal/modules/esm/loader").getOrInitializeCascadedLoader(); + return cascadedLoader.import( + specifier, + parentURL, + importAttributes, + phase === "evaluation" + ? cascadedLoader.kEvaluationPhase + : cascadedLoader.kSourcePhase + ); + } + // `experimentalREPLAwait` is set to true by default. + // Shall be false in case `--no-experimental-repl-await` flag is used. 
+ if (experimentalREPLAwait && StringPrototypeIncludes(code, "await")) { + if (processTopLevelAwait === undefined) { + ({ processTopLevelAwait } = require("internal/repl/await")); + } + + try { + const potentialWrappedCode = processTopLevelAwait(code); + if (potentialWrappedCode !== null) { + code = potentialWrappedCode; + wrappedCmd = true; + awaitPromise = true; + } + } catch (e) { + let recoverableError = false; + if (e.name === "SyntaxError") { + // Remove all "await"s and attempt running the script + // in order to detect if error is truly non recoverable + const fallbackCode = SideEffectFreeRegExpPrototypeSymbolReplace( + /\bawait\b/g, + code, + "" + ); + try { + makeContextifyScript( + fallbackCode, // code + file, // filename, + 0, // lineOffset + 0, // columnOffset, + undefined, // cachedData + false, // produceCachedData + undefined, // parsingContext + hostDefinedOptionId, // hostDefinedOptionId + importModuleDynamically // importModuleDynamically + ); + } catch (fallbackError) { + if (isRecoverableError(fallbackError, fallbackCode)) { + recoverableError = true; + err = new Recoverable(e); + } + } + } + if (!recoverableError) { + decorateErrorStack(e); + err = e; + } + } + } + + // First, create the Script object to check the syntax + if (code === "\n") return cb(null); + + if (err === null) { + while (true) { + try { + if ( + self.replMode === module.exports.REPL_MODE_STRICT && + RegExpPrototypeExec(/^\s*$/, code) === null + ) { + // "void 0" keeps the repl from returning "use strict" as the result + // value for statements and declarations that don't return a value. 
+ code = `'use strict'; void 0;\n${code}`; + } + script = makeContextifyScript( + code, // code + file, // filename, + 0, // lineOffset + 0, // columnOffset, + undefined, // cachedData + false, // produceCachedData + undefined, // parsingContext + hostDefinedOptionId, // hostDefinedOptionId + importModuleDynamically // importModuleDynamically + ); + } catch (e) { + debug("parse error %j", code, e); + if (wrappedCmd) { + // Unwrap and try again + wrappedCmd = false; + awaitPromise = false; + code = input; + wrappedErr = e; + continue; + } + // Preserve original error for wrapped command + const error = wrappedErr || e; + if (isRecoverableError(error, code)) err = new Recoverable(error); + else err = error; + } + break; + } + } + + // This will set the values from `savedRegExMatches` to corresponding + // predefined RegExp properties `RegExp.$1`, `RegExp.$2` ... `RegExp.$9` + RegExpPrototypeExec( + regExMatcher, + ArrayPrototypeJoin(savedRegExMatches, sep) + ); + + let finished = false; + function finishExecution(err, result) { + if (finished) return; + finished = true; + + // After executing the current expression, store the values of RegExp + // predefined properties back in `savedRegExMatches` + for (let idx = 1; idx < savedRegExMatches.length; idx += 1) { + savedRegExMatches[idx] = RegExp[`$${idx}`]; + } + + cb(err, result); + } + + if (!err) { + // Unset raw mode during evaluation so that Ctrl+C raises a signal. + let previouslyInRawMode; + if (self.breakEvalOnSigint) { + // Start the SIGINT watchdog before entering raw mode so that a very + // quick Ctrl+C doesn't lead to aborting the process completely. 
+ if (!startSigintWatchdog()) throw new ERR_CANNOT_WATCH_SIGINT(); + previouslyInRawMode = self._setRawMode(false); + } + + try { + try { + const scriptOptions = { + displayErrors: false, + breakOnSigint: self.breakEvalOnSigint, + }; + + if (self.useGlobal) { + result = FunctionPrototypeCall( + runInThisContext, + script, + scriptOptions + ); + } else { + result = FunctionPrototypeCall( + runInContext, + script, + context, + scriptOptions + ); + } + } finally { + if (self.breakEvalOnSigint) { + // Reset terminal mode to its previous value. + self._setRawMode(previouslyInRawMode); + + // Returns true if there were pending SIGINTs *after* the script + // has terminated without being interrupted itself. + if (stopSigintWatchdog()) { + self.emit("SIGINT"); + } + } + } + } catch (e) { + err = e; + + if (process.domain) { + debug("not recoverable, send to domain"); + process.domain.emit("error", err); + process.domain.exit(); + return; + } + } + + if (awaitPromise && !err) { + let sigintListener; + pause(); + let promise = result; + if (self.breakEvalOnSigint) { + const interrupt = new Promise((resolve, reject) => { + sigintListener = () => { + const tmp = MainContextError.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) + MainContextError.stackTraceLimit = 0; + const err = new ERR_SCRIPT_EXECUTION_INTERRUPTED(); + if (isErrorStackTraceLimitWritable()) + MainContextError.stackTraceLimit = tmp; + reject(err); + }; + prioritizedSigintQueue.add(sigintListener); + }); + promise = SafePromiseRace([promise, interrupt]); + } + + (async () => { + try { + const result = (await promise)?.value; + finishExecution(null, result); + } catch (err) { + if (err && process.domain) { + debug("not recoverable, send to domain"); + process.domain.emit("error", err); + process.domain.exit(); + return; + } + finishExecution(err); + } finally { + // Remove prioritized SIGINT listener if it was not called. 
+ prioritizedSigintQueue.delete(sigintListener); + unpause(); + } + })(); + } + } + + if (!awaitPromise || err) { + finishExecution(err, result); + } + } + + self.eval = self._domain.bind(eval_); + + self._domain.on("error", function debugDomainError(e) { + debug("domain error"); + let errStack = ""; + + if (typeof e === "object" && e !== null) { + overrideStackTrace.set(e, (error, stackFrames) => { + let frames; + if (typeof stackFrames === "object") { + // Search from the bottom of the call stack to + // find the first frame with a null function name + const idx = ArrayPrototypeFindLastIndex( + stackFrames, + (frame) => frame.getFunctionName() === null + ); + // If found, get rid of it and everything below it + frames = ArrayPrototypeSlice(stackFrames, 0, idx); + } else { + frames = stackFrames; + } + // FIXME(devsnek): this is inconsistent with the checks + // that the real prepareStackTrace dispatch uses in + // lib/internal/errors.js. + if (typeof MainContextError.prepareStackTrace === "function") { + return MainContextError.prepareStackTrace(error, frames); + } + return ErrorPrepareStackTrace(error, frames); + }); + decorateErrorStack(e); + + if (e.domainThrown) { + delete e.domain; + delete e.domainThrown; + } + + if (isError(e)) { + if (e.stack) { + if (e.name === "SyntaxError") { + // Remove stack trace. 
+ e.stack = SideEffectFreeRegExpPrototypeSymbolReplace( + /^\s+at\s.*\n?/gm, + SideEffectFreeRegExpPrototypeSymbolReplace( + /^REPL\d+:\d+\r?\n/, + e.stack, + "" + ), + "" + ); + const importErrorStr = + "Cannot use import statement outside a " + "module"; + if (StringPrototypeIncludes(e.message, importErrorStr)) { + e.message = + "Cannot use import statement inside the Node.js " + + "REPL, alternatively use dynamic import: " + + toDynamicImport(ArrayPrototypeAt(self.lines, -1)); + e.stack = SideEffectFreeRegExpPrototypeSymbolReplace( + /SyntaxError:.*\n/, + e.stack, + `SyntaxError: ${e.message}\n` + ); + } + } else if (self.replMode === module.exports.REPL_MODE_STRICT) { + e.stack = SideEffectFreeRegExpPrototypeSymbolReplace( + /(\s+at\s+REPL\d+:)(\d+)/, + e.stack, + (_, pre, line) => pre + (line - 1) + ); + } + } + errStack = self.writer(e); + + // Remove one line error braces to keep the old style in place. + if (errStack[0] === "[" && errStack[errStack.length - 1] === "]") { + errStack = StringPrototypeSlice(errStack, 1, -1); + } + } + } + + if (!self.underscoreErrAssigned) { + self.lastError = e; + } + + if ( + options[kStandaloneREPL] && + process.listenerCount("uncaughtException") !== 0 + ) { + process.nextTick(() => { + process.emit("uncaughtException", e); + self.clearBufferedCommand(); + self.lines.level = []; + if (!self.closed) { + self.displayPrompt(); + } + }); + } else { + if (errStack === "") { + errStack = self.writer(e); + } + const lines = SideEffectFreeRegExpPrototypeSymbolSplit( + /(?<=\n)/, + errStack + ); + let matched = false; + + errStack = ""; + ArrayPrototypeForEach(lines, (line) => { + if ( + !matched && + RegExpPrototypeExec(/^\[?([A-Z][a-z0-9_]*)*Error/, line) !== null + ) { + errStack += + writer.options.breakLength >= line.length + ? `Uncaught ${line}` + : `Uncaught:\n${line}`; + matched = true; + } else { + errStack += line; + } + }); + if (!matched) { + const ln = lines.length === 1 ? 
" " : ":\n"; + errStack = `Uncaught${ln}${errStack}`; + } + // Normalize line endings. + errStack += StringPrototypeEndsWith(errStack, "\n") ? "" : "\n"; + self.output.write(errStack); + self.clearBufferedCommand(); + self.lines.level = []; + if (!self.closed) { + self.displayPrompt(); + } + } + }); + + self.clearBufferedCommand(); + + function completer(text, cb) { + FunctionPrototypeCall( + complete, + self, + text, + self.editorMode ? self.completeOnEditorMode(cb) : cb + ); + } + + self.resetContext(); + + this.commands = { __proto__: null }; + defineDefaultCommands(this); + + // Figure out which "writer" function to use + self.writer = options.writer || module.exports.writer; + + if (self.writer === writer) { + // Conditionally turn on ANSI coloring. + writer.options.colors = self.useColors; + + if (options[kStandaloneREPL]) { + ObjectDefineProperty(inspect, "replDefaults", { + __proto__: null, + get() { + return writer.options; + }, + set(options) { + validateObject(options, "options"); + return ObjectAssign(writer.options, options); + }, + enumerable: true, + configurable: true, + }); + } + } + + function _parseREPLKeyword(keyword, rest) { + const cmd = this.commands[keyword]; + if (cmd) { + FunctionPrototypeCall(cmd.action, this, rest); + return true; + } + return false; + } + + self.on("close", function emitExit() { + if (paused) { + ArrayPrototypePush(pausedBuffer, ["close"]); + return; + } + self.emit("exit"); + }); + + let sawSIGINT = false; + let sawCtrlD = false; + const prioritizedSigintQueue = new SafeSet(); + self.on("SIGINT", function onSigInt() { + if (prioritizedSigintQueue.size > 0) { + for (const task of prioritizedSigintQueue) { + task(); + } + return; + } + + const empty = self.line.length === 0; + self.clearLine(); + _turnOffEditorMode(self); + + const cmd = self[kBufferedCommandSymbol]; + if (!(cmd && cmd.length > 0) && empty) { + if (sawSIGINT) { + self.close(); + sawSIGINT = false; + return; + } + self.output.write( + "(To exit, press 
Ctrl+C again or Ctrl+D or type .exit)\n" + ); + sawSIGINT = true; + } else { + sawSIGINT = false; + } + + self.clearBufferedCommand(); + self.lines.level = []; + self.displayPrompt(); + }); + + self.on("line", function onLine(cmd) { + debug("line %j", cmd); + cmd ||= ""; + sawSIGINT = false; + + if (self.editorMode) { + self[kBufferedCommandSymbol] += cmd + "\n"; + + // code alignment + const matches = + self._sawKeyPress && !self[kLoadingSymbol] + ? RegExpPrototypeExec(/^\s+/, cmd) + : null; + if (matches) { + const prefix = matches[0]; + self.write(prefix); + self.line = prefix; + self.cursor = prefix.length; + } + FunctionPrototypeCall(_memory, self, cmd); + return; + } + + // Check REPL keywords and empty lines against a trimmed line input. + const trimmedCmd = StringPrototypeTrim(cmd); + + // Check to see if a REPL keyword was used. If it returns true, + // display next prompt and return. + if (trimmedCmd) { + if ( + StringPrototypeCharAt(trimmedCmd, 0) === "." && + StringPrototypeCharAt(trimmedCmd, 1) !== "." 
&& + NumberIsNaN(NumberParseFloat(trimmedCmd)) + ) { + const matches = RegExpPrototypeExec( + /^\.([^\s]+)\s*(.*)$/, + trimmedCmd + ); + const keyword = matches?.[1]; + const rest = matches?.[2]; + if ( + FunctionPrototypeCall(_parseREPLKeyword, self, keyword, rest) === + true + ) { + return; + } + if (!self[kBufferedCommandSymbol]) { + self.output.write("Invalid REPL keyword\n"); + finish(null); + return; + } + } + } + + const evalCmd = self[kBufferedCommandSymbol] + cmd + "\n"; + + debug("eval %j", evalCmd); + self.eval(evalCmd, self.context, getREPLResourceName(), finish); + + function finish(e, ret) { + debug("finish", e, ret); + FunctionPrototypeCall(_memory, self, cmd); + + if ( + e && + !self[kBufferedCommandSymbol] && + StringPrototypeStartsWith(StringPrototypeTrim(cmd), "npm ") && + !(e instanceof Recoverable) + ) { + self.output.write( + "npm should be run outside of the " + + "Node.js REPL, in your normal shell.\n" + + "(Press Ctrl+D to exit.)\n" + ); + self.displayPrompt(); + return; + } + + // If error was SyntaxError and not JSON.parse error + // We can start a multiline command + if (e instanceof Recoverable && !sawCtrlD) { + if (self.terminal) { + self[kAddNewLineOnTTY](); + } else { + self[kBufferedCommandSymbol] += cmd + "\n"; + self.displayPrompt(); + } + return; + } + + if (e) { + self._domain.emit("error", e.err || e); + self[kLastCommandErrored] = true; + } + + // Clear buffer if no SyntaxErrors + self.clearBufferedCommand(); + sawCtrlD = false; + + // If we got any output - print it (if no error) + if ( + !e && + // When an invalid REPL command is used, error message is printed + // immediately. We don't have to print anything else. So, only when + // the second argument to this function is there, print it. 
+ arguments.length === 2 && + (!self.ignoreUndefined || ret !== undefined) + ) { + if (!self.underscoreAssigned) { + self.last = ret; + } + self.output.write(self.writer(ret) + "\n"); + } + + // If the REPL sever hasn't closed display prompt again (unless we already + // did by emitting the 'error' event on the domain instance). + if (!self.closed && !e) { + self[kLastCommandErrored] = false; + self.displayPrompt(); + } + } + }); + + self.on("SIGCONT", function onSigCont() { + if (self.editorMode) { + self.output.write(`${self._initialPrompt}.editor\n`); + self.output.write( + "// Entering editor mode (Ctrl+D to finish, Ctrl+C to cancel)\n" + ); + self.output.write(`${self[kBufferedCommandSymbol]}\n`); + self.prompt(true); + } else { + self.displayPrompt(true); + } + }); + + const { reverseSearch } = setupReverseSearch(this); + + const { clearPreview, showPreview } = setupPreview( + this, + kContextId, + kBufferedCommandSymbol, + preview + ); + + // Wrap readline tty to enable editor mode and pausing. + const ttyWrite = FunctionPrototypeBind(self._ttyWrite, self); + self._ttyWrite = (d, key) => { + key ||= {}; + if (paused && !(self.breakEvalOnSigint && key.ctrl && key.name === "c")) { + ArrayPrototypePush(pausedBuffer, [ + "key", + [d, key], + self.isCompletionEnabled, + ]); + return; + } + if (!self.editorMode || !self.terminal) { + // Before exiting, make sure to clear the line. + if ( + key.ctrl && + key.name === "d" && + self.cursor === 0 && + self.line.length === 0 + ) { + self.clearLine(); + } + clearPreview(key); + if (!reverseSearch(d, key)) { + ttyWrite(d, key); + const showCompletionPreview = key.name !== "escape"; + showPreview(showCompletionPreview); + } + return; + } + + // Editor mode + if (key.ctrl && !key.shift) { + switch (key.name) { + // TODO(BridgeAR): There should not be a special mode necessary for full + // multiline support. 
+ case "d": // End editor mode + _turnOffEditorMode(self); + sawCtrlD = true; + ttyWrite(d, { name: "return" }); + break; + case "n": // Override next history item + case "p": // Override previous history item + break; + default: + ttyWrite(d, key); + } + } else { + switch (key.name) { + case "up": // Override previous history item + case "down": // Override next history item + break; + case "tab": + // Prevent double tab behavior + self._previousKey = null; + ttyWrite(d, key); + break; + default: + ttyWrite(d, key); + } + } + }; + + self.displayPrompt(); + } + setupHistory(historyConfig = {}, cb) { + // TODO(puskin94): necessary because historyConfig can be a string for backwards compatibility + const options = + typeof historyConfig === "string" + ? { filePath: historyConfig } + : historyConfig; + + if (typeof cb === "function") { + options.onHistoryFileLoaded = cb; + } + + this.setupHistoryManager(options); + } + clearBufferedCommand() { + this[kBufferedCommandSymbol] = ""; + } + close() { + if ( + this.terminal && + this.historyManager.isFlushing && + !this._closingOnFlush + ) { + this._closingOnFlush = true; + this.once("flushHistory", () => super.close()); + + return; + } + process.nextTick(() => super.close()); + } + createContext() { + let context; + if (this.useGlobal) { + context = globalThis; + } else { + sendInspectorCommand( + (session) => { + session.post("Runtime.enable"); + session.once("Runtime.executionContextCreated", ({ params }) => { + this[kContextId] = params.context.id; + }); + context = vm.createContext(); + session.post("Runtime.disable"); + }, + () => { + context = vm.createContext(); + } + ); + ArrayPrototypeForEach(ObjectGetOwnPropertyNames(globalThis), (name) => { + // Only set properties that do not already exist as a global builtin. 
+ if (!globalBuiltins.has(name)) { + ObjectDefineProperty(context, name, { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(globalThis, name), + }); + } + }); + context.global = context; + const _console = new Console(this.output); + ObjectDefineProperty(context, "console", { + __proto__: null, + configurable: true, + writable: true, + value: _console, + }); + } + + const replModule = new CJSModule(""); + replModule.paths = CJSModule._resolveLookupPaths("", parentModule); + + ObjectDefineProperty(context, "module", { + __proto__: null, + configurable: true, + writable: true, + value: replModule, + }); + ObjectDefineProperty(context, "require", { + __proto__: null, + configurable: true, + writable: true, + value: makeRequireFunction(replModule), + }); + + addBuiltinLibsToObject(context, ""); + + return context; + } + resetContext() { + this.context = this.createContext(); + this.underscoreAssigned = false; + this.underscoreErrAssigned = false; + // TODO(BridgeAR): Deprecate the lines. 
+ this.lines = []; + this.lines.level = []; + + ObjectDefineProperty(this.context, "_", { + __proto__: null, + configurable: true, + get: () => this.last, + set: (value) => { + this.last = value; + if (!this.underscoreAssigned) { + this.underscoreAssigned = true; + this.output.write("Expression assignment to _ now disabled.\n"); + } + }, + }); + + ObjectDefineProperty(this.context, "_error", { + __proto__: null, + configurable: true, + get: () => this.lastError, + set: (value) => { + this.lastError = value; + if (!this.underscoreErrAssigned) { + this.underscoreErrAssigned = true; + this.output.write("Expression assignment to _error now disabled.\n"); + } + }, + }); + + // Allow REPL extensions to extend the new context + this.emit("reset", this.context); + } + displayPrompt(preserveCursor) { + let prompt = this._initialPrompt; + if (this[kBufferedCommandSymbol].length) { + prompt = kMultilinePrompt.description; + } + + // Do not overwrite `_initialPrompt` here + super.setPrompt(prompt); + this.prompt(preserveCursor); + } + // When invoked as an API method, overwrite _initialPrompt + setPrompt(prompt) { + this._initialPrompt = prompt; + super.setPrompt(prompt); + } + complete() { + ReflectApply(this.completer, this, arguments); + } + completeOnEditorMode(callback) { + return (err, results) => { + if (err) return callback(err); + + const { 0: completions, 1: completeOn = "" } = results; + let result = ArrayPrototypeFilter(completions, Boolean); + + if (completeOn && result.length !== 0) { + result = [commonPrefix(result)]; + } + + callback(null, [result, completeOn]); + }; + } + defineCommand(keyword, cmd) { + if (typeof cmd === "function") { + cmd = { action: cmd }; + } else { + validateFunction(cmd.action, "cmd.action"); + } + this.commands[keyword] = cmd; + } +} + +// Prompt is a string to print on each line for the prompt, +// source is a stream to use for I/O, defaulting to stdin/stdout. 
+function start(prompt, source, eval_, useGlobal, ignoreUndefined, replMode) { + return new REPLServer( + prompt, + source, + eval_, + useGlobal, + ignoreUndefined, + replMode + ); +} + +// TODO(BridgeAR): This should be replaced with acorn to build an AST. The +// language became more complex and using a simple approach like this is not +// sufficient anymore. +function _memory(cmd) { + const self = this; + self.lines ||= []; + self.lines.level ||= []; + + // Save the line so I can do magic later + if (cmd) { + const len = self.lines.level.length ? self.lines.level.length - 1 : 0; + ArrayPrototypePush(self.lines, StringPrototypeRepeat(" ", len) + cmd); + } else { + // I don't want to not change the format too much... + ArrayPrototypePush(self.lines, ""); + } + + if (!cmd) { + self.lines.level = []; + return; + } + + // I need to know "depth." + // Because I can not tell the difference between a } that + // closes an object literal and a } that closes a function + const countMatches = (regex, str) => { + let count = 0; + while (RegExpPrototypeExec(regex, str) !== null) count++; + return count; + }; + + // Going down is { and ( e.g. function() { + // going up is } and ) + const dw = countMatches(/[{(]/g, cmd); + const up = countMatches(/[})]/g, cmd); + let depth = dw.length - up.length; + + if (depth) { + (function workIt() { + if (depth > 0) { + // Going... down. + // Push the line#, depth count, and if the line is a function. + // Since JS only has functional scope I only need to remove + // "function() {" lines, clearly this will not work for + // "function() + // {" but nothing should break, only tab completion for local + // scope will not work for this function. + ArrayPrototypePush(self.lines.level, { + line: self.lines.length - 1, + depth: depth, + }); + } else if (depth < 0) { + // Going... up. 
+ const curr = ArrayPrototypePop(self.lines.level); + if (curr) { + const tmp = curr.depth + depth; + if (tmp < 0) { + // More to go, recurse + depth += curr.depth; + workIt(); + } else if (tmp > 0) { + // Remove and push back + curr.depth += depth; + ArrayPrototypePush(self.lines.level, curr); + } + } + } + })(); + } +} + +function _turnOnEditorMode(repl) { + repl.editorMode = true; + FunctionPrototypeCall(Interface.prototype.setPrompt, repl, ""); +} + +function _turnOffEditorMode(repl) { + repl.editorMode = false; + repl.setPrompt(repl._initialPrompt); +} + +function defineDefaultCommands(repl) { + repl.defineCommand("break", { + help: "Sometimes you get stuck, this gets you out", + action: function () { + this.clearBufferedCommand(); + this.displayPrompt(); + }, + }); + + let clearMessage; + if (repl.useGlobal) { + clearMessage = "Alias for .break"; + } else { + clearMessage = "Break, and also clear the local context"; + } + repl.defineCommand("clear", { + help: clearMessage, + action: function () { + this.clearBufferedCommand(); + if (!this.useGlobal) { + this.output.write("Clearing context...\n"); + this.resetContext(); + } + this.displayPrompt(); + }, + }); + + repl.defineCommand("exit", { + help: "Exit the REPL", + action: function () { + this.close(); + }, + }); + + repl.defineCommand("help", { + help: "Print this help message", + action: function () { + const names = ArrayPrototypeSort(ObjectKeys(this.commands)); + const longestNameLength = MathMaxApply( + ArrayPrototypeMap(names, (name) => name.length) + ); + ArrayPrototypeForEach(names, (name) => { + const cmd = this.commands[name]; + const spaces = StringPrototypeRepeat( + " ", + longestNameLength - name.length + 3 + ); + const line = `.${name}${cmd.help ? 
spaces + cmd.help : ""}\n`; + this.output.write(line); + }); + this.output.write( + "\nPress Ctrl+C to abort current expression, " + + "Ctrl+D to exit the REPL\n" + ); + this.displayPrompt(); + }, + }); + + repl.defineCommand("save", { + help: "Save all evaluated commands in this REPL session to a file", + action: function (file) { + try { + if (file === "") { + throw new ERR_MISSING_ARGS("file"); + } + fs.writeFileSync(file, ArrayPrototypeJoin(this.lines, "\n")); + this.output.write(`Session saved to: ${file}\n`); + } catch (error) { + if (error instanceof ERR_MISSING_ARGS) { + this.output.write(`${error.message}\n`); + } else { + this.output.write(`Failed to save: ${file}\n`); + } + } + this.displayPrompt(); + }, + }); + + repl.defineCommand("load", { + help: "Load JS from a file into the REPL session", + action: function (file) { + try { + if (file === "") { + throw new ERR_MISSING_ARGS("file"); + } + const stats = fs.statSync(file); + if (stats && stats.isFile()) { + _turnOnEditorMode(this); + this[kLoadingSymbol] = true; + const data = fs.readFileSync(file, "utf8"); + this.write(data); + this[kLoadingSymbol] = false; + _turnOffEditorMode(this); + this.write("\n"); + } else { + this.output.write(`Failed to load: ${file} is not a valid file\n`); + } + } catch (error) { + if (error instanceof ERR_MISSING_ARGS) { + this.output.write(`${error.message}\n`); + } else { + this.output.write(`Failed to load: ${file}\n`); + } + } + this.displayPrompt(); + }, + }); + if (repl.terminal) { + repl.defineCommand("editor", { + help: "Enter editor mode", + action() { + _turnOnEditorMode(this); + this.output.write( + "// Entering editor mode (Ctrl+D to finish, Ctrl+C to cancel)\n" + ); + }, + }); + } +} + +module.exports = { + start, + writer, + REPLServer, + REPL_MODE_SLOPPY, + REPL_MODE_STRICT, + Recoverable, + isValidSyntax, +}; + +ObjectDefineProperty(module.exports, "builtinModules", { + __proto__: null, + get: pendingDeprecation + ? 
deprecate( + () => getReplBuiltinLibs(), + "repl.builtinModules is deprecated. Check module.builtinModules instead", + "DEP0191" + ) + : () => getReplBuiltinLibs(), + set: pendingDeprecation + ? deprecate( + (val) => setReplBuiltinLibs(val), + "repl.builtinModules is deprecated. Check module.builtinModules instead", + "DEP0191" + ) + : (val) => setReplBuiltinLibs(val), + enumerable: false, + configurable: true, +}); + +ObjectDefineProperty(module.exports, "_builtinLibs", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => getReplBuiltinLibs(), + "repl._builtinLibs is deprecated. Check module.builtinModules instead", + "DEP0142" + ) + : () => getReplBuiltinLibs(), + set: pendingDeprecation + ? deprecate( + (val) => setReplBuiltinLibs(val), + "repl._builtinLibs is deprecated. Check module.builtinModules instead", + "DEP0142" + ) + : (val) => setReplBuiltinLibs(val), + enumerable: false, + configurable: true, +}); diff --git a/node/section-links.js b/node/section-links.js new file mode 100644 index 00000000..e3a25655 --- /dev/null +++ b/node/section-links.js @@ -0,0 +1,21 @@ +document.addEventListener('DOMContentLoaded', function(event) { + function f(n) { + if (n.nodeType == 1 && n.tagName.match(/^H[1-6]$/)) { + var span = document.createElement('span'); + span.className = 'section-link'; + span.textContent = '\xa0'; + var a = document.createElement('a'); + a.href = '#' + n.parentNode.id; + a.textContent = '\xb6'; + span.appendChild(a); + n.appendChild(span); + } else { + n = n.firstChild; + while (n) { + f(n); + n = n.nextSibling; + } + } + } + f(document.getElementById('sections')); + }, false); \ No newline at end of file diff --git a/node/sqlite.js b/node/sqlite.js new file mode 100644 index 00000000..7724f789 --- /dev/null +++ b/node/sqlite.js @@ -0,0 +1,6 @@ +"use strict"; +const { emitExperimentalWarning } = require("internal/util"); + +emitExperimentalWarning("SQLite"); + +module.exports = internalBinding("sqlite"); diff --git 
a/node/stream.js b/node/stream.js new file mode 100644 index 00000000..3d75a30e --- /dev/null +++ b/node/stream.js @@ -0,0 +1,130 @@ +"use strict"; + +const { ObjectDefineProperty, ObjectKeys, ReflectApply } = primordials; + +const { + promisify: { custom: customPromisify }, +} = require("internal/util"); + +const { + streamReturningOperators, + promiseReturningOperators, +} = require("internal/streams/operators"); + +const { + codes: { ERR_ILLEGAL_CONSTRUCTOR }, +} = require("internal/errors"); +const compose = require("internal/streams/compose"); +const { + setDefaultHighWaterMark, + getDefaultHighWaterMark, +} = require("internal/streams/state"); +const { pipeline } = require("internal/streams/pipeline"); +const { destroyer } = require("internal/streams/destroy"); +const eos = require("internal/streams/end-of-stream"); +const internalBuffer = require("internal/buffer"); + +const promises = require("stream/promises"); +const utils = require("internal/streams/utils"); +const { isArrayBufferView, isUint8Array } = require("internal/util/types"); + +const Stream = (module.exports = require("internal/streams/legacy").Stream); + +Stream.isDestroyed = utils.isDestroyed; +Stream.isDisturbed = utils.isDisturbed; +Stream.isErrored = utils.isErrored; +Stream.isReadable = utils.isReadable; +Stream.isWritable = utils.isWritable; + +Stream.Readable = require("internal/streams/readable"); +const streamKeys = ObjectKeys(streamReturningOperators); +for (let i = 0; i < streamKeys.length; i++) { + const key = streamKeys[i]; + const op = streamReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + return Stream.Readable.from(ReflectApply(op, this, args)); + } + ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name }); + ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn, + enumerable: false, + 
configurable: true, + writable: true, + }); +} +const promiseKeys = ObjectKeys(promiseReturningOperators); +for (let i = 0; i < promiseKeys.length; i++) { + const key = promiseKeys[i]; + const op = promiseReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + return ReflectApply(op, this, args); + } + ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name }); + ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +Stream.Writable = require("internal/streams/writable"); +Stream.Duplex = require("internal/streams/duplex"); +Stream.Transform = require("internal/streams/transform"); +Stream.PassThrough = require("internal/streams/passthrough"); +Stream.duplexPair = require("internal/streams/duplexpair"); +Stream.pipeline = pipeline; +const { addAbortSignal } = require("internal/streams/add-abort-signal"); +Stream.addAbortSignal = addAbortSignal; +Stream.finished = eos; +Stream.destroy = destroyer; +Stream.compose = compose; +Stream.setDefaultHighWaterMark = setDefaultHighWaterMark; +Stream.getDefaultHighWaterMark = getDefaultHighWaterMark; + +ObjectDefineProperty(Stream, "promises", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return promises; + }, +}); + +ObjectDefineProperty(pipeline, customPromisify, { + __proto__: null, + enumerable: true, + get() { + return promises.pipeline; + }, +}); + +ObjectDefineProperty(eos, customPromisify, { + __proto__: null, + enumerable: true, + get() { + return promises.finished; + }, +}); + +// Backwards-compat with node 0.4.x +Stream.Stream = Stream; + +Stream._isArrayBufferView = isArrayBufferView; +Stream._isUint8Array = isUint8Array; +Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return new internalBuffer.FastBuffer( + 
chunk.buffer, + chunk.byteOffset, + chunk.byteLength + ); +}; diff --git a/node/string_decoder.js b/node/string_decoder.js new file mode 100644 index 00000000..b774af1b --- /dev/null +++ b/node/string_decoder.js @@ -0,0 +1,125 @@ +'use strict'; + +const { + ArrayBufferIsView, + ObjectDefineProperties, + Symbol, + TypedArrayPrototypeSubarray, +} = primordials; + +const { Buffer } = require('buffer'); +const { + kIncompleteCharactersStart, + kIncompleteCharactersEnd, + kMissingBytes, + kBufferedBytes, + kEncodingField, + kSize, + decode, + flush, +} = internalBinding('string_decoder'); +const { + encodingsMap, + normalizeEncoding, +} = require('internal/util'); +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_THIS, + ERR_UNKNOWN_ENCODING, +} = require('internal/errors').codes; + +const kNativeDecoder = Symbol('kNativeDecoder'); + +/** + * StringDecoder provides an interface for efficiently splitting a series of + * buffers into a series of JS strings without breaking apart multibyte + * characters. 
 * @param {string} [encoding]
 */
function StringDecoder(encoding) {
  this.encoding = normalizeEncoding(encoding);
  if (this.encoding === undefined) {
    throw new ERR_UNKNOWN_ENCODING(encoding);
  }
  // All decoder state (pending bytes, counters, encoding id) lives in a
  // small Buffer shared with the native binding; the k* constants index it.
  this[kNativeDecoder] = Buffer.alloc(kSize);
  this[kNativeDecoder][kEncodingField] = encodingsMap[this.encoding];
}

/**
 * Returns a decoded string, omitting any incomplete multi-bytes
 * characters at the end of the Buffer, or TypedArray, or DataView
 * @param {string | Buffer | TypedArray | DataView} buf
 * @returns {string}
 * @throws {TypeError} Throws when buf is not in one of supported types
 */
StringDecoder.prototype.write = function write(buf) {
  // Strings pass through untouched.
  if (typeof buf === 'string')
    return buf;
  if (!ArrayBufferIsView(buf))
    throw new ERR_INVALID_ARG_TYPE('buf',
                                   ['Buffer', 'TypedArray', 'DataView'],
                                   buf);
  // Detached/invalid receiver (e.g. write called on a non-decoder).
  if (!this[kNativeDecoder]) {
    throw new ERR_INVALID_THIS('StringDecoder');
  }
  return decode(this[kNativeDecoder], buf);
};

/**
 * Returns any remaining input stored in the internal buffer as a string.
 * After end() is called, the stringDecoder object can be reused for new
 * input.
 * @param {string | Buffer | TypedArray | DataView} [buf]
 * @returns {string}
 */
StringDecoder.prototype.end = function end(buf) {
  const ret = buf === undefined ? '' : this.write(buf);
  // Flush whatever partial character is still buffered natively.
  if (this[kNativeDecoder][kBufferedBytes] > 0)
    return ret + flush(this[kNativeDecoder]);
  return ret;
};

/* Everything below this line is undocumented legacy stuff. */
/**
 * Decodes buf from `offset`, first discarding any partially-buffered
 * character state.
 * @param {string | Buffer | TypedArray | DataView} buf
 * @param {number} offset
 * @returns {string}
 */
StringDecoder.prototype.text = function text(buf, offset) {
  this[kNativeDecoder][kMissingBytes] = 0;
  this[kNativeDecoder][kBufferedBytes] = 0;
  return this.write(buf.slice(offset));
};

// Legacy accessors exposing the native decoder's internal state.
ObjectDefineProperties(StringDecoder.prototype, {
  // The bytes of the trailing incomplete character, as a subarray view.
  lastChar: {
    __proto__: null,
    configurable: true,
    enumerable: true,
    get() {
      return TypedArrayPrototypeSubarray(this[kNativeDecoder],
                                         kIncompleteCharactersStart,
                                         kIncompleteCharactersEnd);
    },
  },
  // How many more bytes are needed to complete the buffered character.
  lastNeed: {
    __proto__: null,
    configurable: true,
    enumerable: true,
    get() {
      return this[kNativeDecoder][kMissingBytes];
    },
  },
  // Total byte length of the character currently being assembled.
  lastTotal: {
    __proto__: null,
    configurable: true,
    enumerable: true,
    get() {
      return this[kNativeDecoder][kBufferedBytes] +
             this[kNativeDecoder][kMissingBytes];
    },
  },
});

exports.StringDecoder = StringDecoder;
\ No newline at end of file diff --git a/node/task_processor.js b/node/task_processor.js new file mode 100644 index 00000000..ebb455ee --- /dev/null +++ b/node/task_processor.js @@ -0,0 +1,91 @@
// NOTE(review): this file plays both roles: when loaded as a worker it
// answers `a + b` messages; when required normally it exports WorkerPool,
// which spawns this same file as its workers (see addNewWorker below).
const { parentPort } = require('node:worker_threads');
parentPort.on('message', (task) => {
  parentPort.postMessage(task.a + task.b);
});
const { AsyncResource } = require('node:async_hooks');
const { EventEmitter } = require('node:events');
const path = require('node:path');
const { Worker } = require('node:worker_threads');

const kTaskInfo = Symbol('kTaskInfo');
const kWorkerFreedEvent = Symbol('kWorkerFreedEvent');

// Associates a queued task's callback with the async context it was
// submitted from, so the callback runs in the caller's async scope.
class WorkerPoolTaskInfo extends AsyncResource {
  constructor(callback) {
    super('WorkerPoolTaskInfo');
    this.callback = callback;
  }

  done(err, result) {
    this.runInAsyncScope(this.callback, null, err, result);
    this.emitDestroy(); // `TaskInfo`s are used only once.
+ } +} + +class WorkerPool extends EventEmitter { + constructor(numThreads) { + super(); + this.numThreads = numThreads; + this.workers = []; + this.freeWorkers = []; + this.tasks = []; + + for (let i = 0; i < numThreads; i++) + this.addNewWorker(); + + // Any time the kWorkerFreedEvent is emitted, dispatch + // the next task pending in the queue, if any. + this.on(kWorkerFreedEvent, () => { + if (this.tasks.length > 0) { + const { task, callback } = this.tasks.shift(); + this.runTask(task, callback); + } + }); + } + + addNewWorker() { + const worker = new Worker(path.resolve(__dirname, 'task_processor.js')); + worker.on('message', (result) => { + // In case of success: Call the callback that was passed to `runTask`, + // remove the `TaskInfo` associated with the Worker, and mark it as free + // again. + worker[kTaskInfo].done(null, result); + worker[kTaskInfo] = null; + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + }); + worker.on('error', (err) => { + // In case of an uncaught exception: Call the callback that was passed to + // `runTask` with the error. + if (worker[kTaskInfo]) + worker[kTaskInfo].done(err, null); + else + this.emit('error', err); + // Remove the worker from the list and start a new Worker to replace the + // current one. + this.workers.splice(this.workers.indexOf(worker), 1); + this.addNewWorker(); + }); + this.workers.push(worker); + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + } + + runTask(task, callback) { + if (this.freeWorkers.length === 0) { + // No free threads, wait until a worker thread becomes free. 
+ this.tasks.push({ task, callback }); + return; + } + + const worker = this.freeWorkers.pop(); + worker[kTaskInfo] = new WorkerPoolTaskInfo(callback); + worker.postMessage(task); + } + + close() { + for (const worker of this.workers) worker.terminate(); + } +} + +module.exports = WorkerPool; \ No newline at end of file diff --git a/node/test.js b/node/test.js new file mode 100644 index 00000000..23df1276 --- /dev/null +++ b/node/test.js @@ -0,0 +1,83 @@ +"use strict"; + +const { ObjectAssign, ObjectDefineProperty } = primordials; + +const { + test, + suite, + before, + after, + beforeEach, + afterEach, +} = require("internal/test_runner/harness"); +const { run } = require("internal/test_runner/runner"); + +module.exports = test; +ObjectAssign(module.exports, { + after, + afterEach, + before, + beforeEach, + describe: suite, + it: test, + run, + suite, + test, +}); + +let lazyMock; + +ObjectDefineProperty(module.exports, "mock", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazyMock === undefined) { + const { MockTracker } = require("internal/test_runner/mock/mock"); + + lazyMock = new MockTracker(); + } + + return lazyMock; + }, +}); + +let lazySnapshot; + +ObjectDefineProperty(module.exports, "snapshot", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazySnapshot === undefined) { + const { + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + } = require("internal/test_runner/snapshot"); + + lazySnapshot = { + __proto__: null, + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + }; + } + + return lazySnapshot; + }, +}); + +let lazyAssert; + +ObjectDefineProperty(module.exports, "assert", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazyAssert === undefined) { + const { register } = require("internal/test_runner/assert"); + lazyAssert = { __proto__: null, register }; + } + + return lazyAssert; + }, +}); diff --git a/node/tls.js b/node/tls.js new file 
mode 100644 index 00000000..6cd6b269 --- /dev/null +++ b/node/tls.js @@ -0,0 +1,405 @@
'use strict';

const {
  Array,
  ArrayIsArray,
  // eslint-disable-next-line no-restricted-syntax
  ArrayPrototypePush,
  JSONParse,
  ObjectDefineProperty,
  ObjectFreeze,
  StringFromCharCode,
} = primordials;

const {
  ERR_TLS_CERT_ALTNAME_FORMAT,
  ERR_TLS_CERT_ALTNAME_INVALID,
  ERR_OUT_OF_RANGE,
  ERR_INVALID_ARG_VALUE,
  ERR_INVALID_ARG_TYPE,
} = require('internal/errors').codes;
const internalUtil = require('internal/util');
// Fail early if Node was built without crypto support.
internalUtil.assertCrypto();
const {
  isArrayBufferView,
  isUint8Array,
} = require('internal/util/types');

const net = require('net');
const { getOptionValue } = require('internal/options');
const {
  getBundledRootCertificates,
  getExtraCACertificates,
  getSystemCACertificates,
  resetRootCertStore,
  getUserRootCertificates,
  getSSLCiphers,
} = internalBinding('crypto');
const { Buffer } = require('buffer');
const { canonicalizeIP } = internalBinding('cares_wrap');
const _tls_common = require('_tls_common');
const _tls_wrap = require('_tls_wrap');
const { validateString } = require('internal/validators');

// Allow {CLIENT_RENEG_LIMIT} client-initiated session renegotiations
// every {CLIENT_RENEG_WINDOW} seconds. An error event is emitted if more
// renegotiations are seen. The settings are applied to all remote client
// connections.
exports.CLIENT_RENEG_LIMIT = 3;
exports.CLIENT_RENEG_WINDOW = 600;

exports.DEFAULT_CIPHERS = getOptionValue('--tls-cipher-list');

exports.DEFAULT_ECDH_CURVE = 'auto';

// CLI flags select the protocol floor; lowest requested version wins.
if (getOptionValue('--tls-min-v1.0'))
  exports.DEFAULT_MIN_VERSION = 'TLSv1';
else if (getOptionValue('--tls-min-v1.1'))
  exports.DEFAULT_MIN_VERSION = 'TLSv1.1';
else if (getOptionValue('--tls-min-v1.2'))
  exports.DEFAULT_MIN_VERSION = 'TLSv1.2';
else if (getOptionValue('--tls-min-v1.3'))
  exports.DEFAULT_MIN_VERSION = 'TLSv1.3';
else
  exports.DEFAULT_MIN_VERSION = 'TLSv1.2';

if (getOptionValue('--tls-max-v1.3'))
  exports.DEFAULT_MAX_VERSION = 'TLSv1.3';
else if (getOptionValue('--tls-max-v1.2'))
  exports.DEFAULT_MAX_VERSION = 'TLSv1.2';
else
  exports.DEFAULT_MAX_VERSION = 'TLSv1.3'; // Will depend on node version.


// Cipher list is computed once and memoized; duplicates filtered out.
exports.getCiphers = internalUtil.cachedResult(
  () => internalUtil.filterDuplicateStrings(getSSLCiphers(), true),
);

// Each certificate source below is fetched lazily and frozen on first use.
let bundledRootCertificates;
function cacheBundledRootCertificates() {
  bundledRootCertificates ||= ObjectFreeze(getBundledRootCertificates());

  return bundledRootCertificates;
}

ObjectDefineProperty(exports, 'rootCertificates', {
  __proto__: null,
  configurable: false,
  enumerable: true,
  get: cacheBundledRootCertificates,
});

let extraCACertificates;
function cacheExtraCACertificates() {
  extraCACertificates ||= ObjectFreeze(getExtraCACertificates());

  return extraCACertificates;
}

let systemCACertificates;
function cacheSystemCACertificates() {
  systemCACertificates ||= ObjectFreeze(getSystemCACertificates());

  return systemCACertificates;
}

let defaultCACertificates;
// Set once setDefaultCACertificates() has replaced the root store; after
// that the defaults come from the user store instead of the bundled lists.
let hasResetDefaultCACertificates = false;

function cacheDefaultCACertificates() {
  if (defaultCACertificates) { return defaultCACertificates; }

  if (hasResetDefaultCACertificates) {
    defaultCACertificates = getUserRootCertificates();
    ObjectFreeze(defaultCACertificates);
    return defaultCACertificates;
  }

  defaultCACertificates = [];

  // With --use-openssl-ca, OpenSSL's own store is used and nothing from the
  // bundled/system lists is added here.
  if (!getOptionValue('--use-openssl-ca')) {
    const bundled = cacheBundledRootCertificates();
    for (let i = 0; i < bundled.length; ++i) {
      ArrayPrototypePush(defaultCACertificates, bundled[i]);
    }
    if (getOptionValue('--use-system-ca')) {
      const system = cacheSystemCACertificates();
      for (let i = 0; i < system.length; ++i) {

        ArrayPrototypePush(defaultCACertificates, system[i]);
      }
    }
  }

  if (process.env.NODE_EXTRA_CA_CERTS) {
    const extra = cacheExtraCACertificates();
    for (let i = 0; i < extra.length; ++i) {

      ArrayPrototypePush(defaultCACertificates, extra[i]);
    }
  }

  ObjectFreeze(defaultCACertificates);
  return defaultCACertificates;
}

// TODO(joyeecheung): support X509Certificate output?
/**
 * Returns the CA certificates of the requested kind.
 * @param {'default'|'bundled'|'system'|'extra'} [type]
 * @returns {readonly string[]} frozen array of PEM certificates
 * @throws {ERR_INVALID_ARG_VALUE} for an unknown type
 */
function getCACertificates(type = 'default') {
  validateString(type, 'type');

  switch (type) {
    case 'default':
      return cacheDefaultCACertificates();
    case 'bundled':
      return cacheBundledRootCertificates();
    case 'system':
      return cacheSystemCACertificates();
    case 'extra':
      return cacheExtraCACertificates();
    default:
      throw new ERR_INVALID_ARG_VALUE('type', type);
  }
}
exports.getCACertificates = getCACertificates;

/**
 * Replaces the process-wide default root certificate store and invalidates
 * the cached 'default' list.
 * @param {Array<string|ArrayBufferView>} certs
 */
function setDefaultCACertificates(certs) {
  if (!ArrayIsArray(certs)) {
    throw new ERR_INVALID_ARG_TYPE('certs', 'Array', certs);
  }

  // Verify that all elements in the array are strings or ArrayBufferViews.
  for (let i = 0; i < certs.length; i++) {
    if (typeof certs[i] !== 'string' && !isArrayBufferView(certs[i])) {
      throw new ERR_INVALID_ARG_TYPE(
        `certs[${i}]`, ['string', 'ArrayBufferView'], certs[i]);
    }
  }

  resetRootCertStore(certs);
  defaultCACertificates = undefined; // Reset the cached default certificates
  hasResetDefaultCACertificates = true;
}

exports.setDefaultCACertificates = setDefaultCACertificates;

// Convert protocols array into valid OpenSSL protocols list
// ("\x06spdy/2\x08http/1.1\x08http/1.0")
function convertProtocols(protocols) {
  const lens = new
Array(protocols.length);
  // Wire format: each protocol is a one-byte length prefix followed by the
  // protocol bytes; total size is computed while validating lengths.
  const buff = Buffer.allocUnsafe(protocols.reduce((p, c, i) => {
    const len = Buffer.byteLength(c);
    if (len > 255) {
      throw new ERR_OUT_OF_RANGE('The byte length of the protocol at index ' +
        `${i} exceeds the maximum length.`, '<= 255', len, true);
    }
    lens[i] = len;
    return p + 1 + len;
  }, 0));

  let offset = 0;
  for (let i = 0, c = protocols.length; i < c; i++) {
    buff[offset++] = lens[i];
    buff.write(protocols[i], offset);
    offset += lens[i];
  }

  return buff;
}

/**
 * Normalizes ALPN protocol input onto out.ALPNProtocols as a Buffer in
 * OpenSSL wire format; unrecognized input types are silently ignored.
 */
exports.convertALPNProtocols = function convertALPNProtocols(protocols, out) {
  // If protocols is Array - translate it into buffer
  if (ArrayIsArray(protocols)) {
    out.ALPNProtocols = convertProtocols(protocols);
  } else if (isUint8Array(protocols)) {
    // Copy new buffer not to be modified by user.
    out.ALPNProtocols = Buffer.from(protocols);
  } else if (isArrayBufferView(protocols)) {
    out.ALPNProtocols = Buffer.from(protocols.buffer.slice(
      protocols.byteOffset,
      protocols.byteOffset + protocols.byteLength,
    ));
  }
};

// Strips a single trailing dot (fully-qualified form) from a hostname.
function unfqdn(host) {
  return host.replace(/[.]$/, '');
}

// String#toLowerCase() is locale-sensitive so we use
// a conservative version that only lowercases A-Z.
function toLowerCase(c) {
  return StringFromCharCode(32 + c.charCodeAt(0));
}

// Lowercases (ASCII only), drops a trailing dot, and splits into labels.
function splitHost(host) {
  return unfqdn(host).replace(/[A-Z]/g, toLowerCase).split('.');
}

/**
 * Matches a hostname (pre-split into labels) against one certificate name
 * pattern per the RFC 6125 rules; `wildcards` enables `*` in the leftmost
 * label only.
 * @returns {boolean}
 */
function check(hostParts, pattern, wildcards) {
  // Empty strings, null, undefined, etc. never match.
  if (!pattern)
    return false;

  const patternParts = splitHost(pattern);

  if (hostParts.length !== patternParts.length)
    return false;

  // Pattern has empty components, e.g. "bad..example.com".
  if (patternParts.includes(''))
    return false;

  // RFC 6125 allows IDNA U-labels (Unicode) in names but we have no
  // good way to detect their encoding or normalize them so we simply
  // reject them. Control characters and blanks are rejected as well
  // because nothing good can come from accepting them.
  const isBad = (s) => /[^\u0021-\u007F]/u.test(s);
  if (patternParts.some(isBad))
    return false;

  // Check host parts from right to left first.
  // Index 0 (the leftmost label) is handled separately below because it is
  // the only position where a wildcard may appear.
  for (let i = hostParts.length - 1; i > 0; i -= 1) {
    if (hostParts[i] !== patternParts[i])
      return false;
  }

  const hostSubdomain = hostParts[0];
  const patternSubdomain = patternParts[0];
  const patternSubdomainParts = patternSubdomain.split('*', 3);

  // Short-circuit when the subdomain does not contain a wildcard.
  // RFC 6125 does not allow wildcard substitution for components
  // containing IDNA A-labels (Punycode) so match those verbatim.
  if (patternSubdomainParts.length === 1 ||
      patternSubdomain.includes('xn--'))
    return hostSubdomain === patternSubdomain;

  if (!wildcards)
    return false;

  // More than one wildcard is always wrong.
  if (patternSubdomainParts.length > 2)
    return false;

  // *.tld wildcards are not allowed.
  if (patternParts.length <= 2)
    return false;

  const { 0: prefix, 1: suffix } = patternSubdomainParts;

  if (prefix.length + suffix.length > hostSubdomain.length)
    return false;

  if (!hostSubdomain.startsWith(prefix))
    return false;

  if (!hostSubdomain.endsWith(suffix))
    return false;

  return true;
}

// This pattern is used to determine the length of escaped sequences within
// the subject alt names string. It allows any valid JSON string literal.
// This MUST match the JSON specification (ECMA-404 / RFC8259) exactly.
const jsonStringPattern =
  // eslint-disable-next-line no-control-regex
  /^"(?:[^"\\\u0000-\u001f]|\\(?:["\\/bfnrt]|u[0-9a-fA-F]{4}))*"/;

/**
 * Splits a subjectaltname string on ', ' separators while honoring
 * JSON-escaped (quoted) segments, unescaping them via JSONParse.
 * @throws {ERR_TLS_CERT_ALTNAME_FORMAT} on a malformed quoted segment
 */
function splitEscapedAltNames(altNames) {
  const result = [];
  let currentToken = '';
  let offset = 0;
  while (offset !== altNames.length) {
    const nextSep = altNames.indexOf(',', offset);
    const nextQuote = altNames.indexOf('"', offset);
    if (nextQuote !== -1 && (nextSep === -1 || nextQuote < nextSep)) {
      // There is a quote character and there is no separator before the quote.
      currentToken += altNames.substring(offset, nextQuote);
      const match = jsonStringPattern.exec(altNames.substring(nextQuote));
      if (!match) {
        throw new ERR_TLS_CERT_ALTNAME_FORMAT();
      }
      currentToken += JSONParse(match[0]);
      offset = nextQuote + match[0].length;
    } else if (nextSep !== -1) {
      // There is a separator and no quote before it.
      currentToken += altNames.substring(offset, nextSep);
      result.push(currentToken);
      currentToken = '';
      // Skip the ', ' separator (comma plus the following space).
      offset = nextSep + 2;
    } else {
      currentToken += altNames.substring(offset);
      offset = altNames.length;
    }
  }
  result.push(currentToken);
  return result;
}

/**
 * Verifies that `cert` matches `hostname` per the server-identity rules:
 * collects DNS and IP SANs, then checks IPs exactly and DNS names (or the
 * subject CN as a fallback) with wildcard matching.
 * @returns {Error|undefined} an ERR_TLS_CERT_ALTNAME_INVALID on mismatch
 */
exports.checkServerIdentity = function checkServerIdentity(hostname, cert) {
  const subject = cert.subject;
  const altNames = cert.subjectaltname;
  const dnsNames = [];
  const ips = [];

  hostname = '' + hostname;

  if (altNames) {
    // Quoted (escaped) entries require the JSON-aware splitter.
    const splitAltNames = altNames.includes('"') ?
      splitEscapedAltNames(altNames) :
      altNames.split(', ');
    splitAltNames.forEach((name) => {
      if (name.startsWith('DNS:')) {
        dnsNames.push(name.slice(4));
      } else if (name.startsWith('IP Address:')) {
        ips.push(canonicalizeIP(name.slice(11)));
      }
    });
  }

  let valid = false;
  let reason = 'Unknown reason';

  hostname = unfqdn(hostname); // Remove trailing dot for error messages.
+ + if (net.isIP(hostname)) { + valid = ips.includes(canonicalizeIP(hostname)); + if (!valid) + reason = `IP: ${hostname} is not in the cert's list: ` + ips.join(', '); + } else if (dnsNames.length > 0 || subject?.CN) { + const hostParts = splitHost(hostname); + const wildcard = (pattern) => check(hostParts, pattern, true); + + if (dnsNames.length > 0) { + valid = dnsNames.some(wildcard); + if (!valid) + reason = + `Host: ${hostname}. is not in the cert's altnames: ${altNames}`; + } else { + // Match against Common Name only if no supported identifiers exist. + const cn = subject.CN; + + if (ArrayIsArray(cn)) + valid = cn.some(wildcard); + else if (cn) + valid = wildcard(cn); + + if (!valid) + reason = `Host: ${hostname}. is not cert's CN: ${cn}`; + } + } else { + reason = 'Cert does not contain a DNS name'; + } + + if (!valid) { + return new ERR_TLS_CERT_ALTNAME_INVALID(reason, hostname, cert); + } +}; + +exports.createSecureContext = _tls_common.createSecureContext; +exports.SecureContext = _tls_common.SecureContext; +exports.TLSSocket = _tls_wrap.TLSSocket; +exports.Server = _tls_wrap.Server; +exports.createServer = _tls_wrap.createServer; +exports.connect = _tls_wrap.connect; \ No newline at end of file diff --git a/node/trace_events.js b/node/trace_events.js new file mode 100644 index 00000000..69fccafc --- /dev/null +++ b/node/trace_events.js @@ -0,0 +1,75 @@ +'use strict'; + +const { + ObjectAssign, + ObjectDefineProperty, +} = primordials; + +const { test, suite, before, after, beforeEach, afterEach } = require('internal/test_runner/harness'); +const { run } = require('internal/test_runner/runner'); + +module.exports = test; +ObjectAssign(module.exports, { + after, + afterEach, + before, + beforeEach, + describe: suite, + it: test, + run, + suite, + test, +}); + +let lazyMock; + +ObjectDefineProperty(module.exports, 'mock', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazyMock === undefined) { + const { MockTracker } = 
require('internal/test_runner/mock/mock'); + + lazyMock = new MockTracker(); + } + + return lazyMock; + }, +}); + +let lazySnapshot; + +ObjectDefineProperty(module.exports, 'snapshot', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazySnapshot === undefined) { + const { + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + } = require('internal/test_runner/snapshot'); + + lazySnapshot = { + __proto__: null, + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + }; + } + + return lazySnapshot; + }, +}); + +ObjectDefineProperty(module.exports, 'assert', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + const { register } = require('internal/test_runner/assert'); + const assert = { __proto__: null, register }; + ObjectDefineProperty(module.exports, 'assert', { __proto__: null, configurable: true, enumerable: true, value: assert }); + return assert; + }, +}); \ No newline at end of file diff --git a/node/tty.js b/node/tty.js new file mode 100644 index 00000000..f9275fab --- /dev/null +++ b/node/tty.js @@ -0,0 +1,147 @@ +'use strict'; + +const { + NumberIsInteger, + ObjectSetPrototypeOf, +} = primordials; + +const net = require('net'); +const { TTY, isTTY } = internalBinding('tty_wrap'); +const { + ErrnoException, + codes: { + ERR_INVALID_FD, + ERR_TTY_INIT_FAILED, + }, +} = require('internal/errors'); +const { + getColorDepth, + hasColors, +} = require('internal/tty'); + +// Lazy loaded for startup performance. 
+let readline; + +function isatty(fd) { + return NumberIsInteger(fd) && fd >= 0 && fd <= 2147483647 && + isTTY(fd); +} + +function ReadStream(fd, options) { + if (!(this instanceof ReadStream)) + return new ReadStream(fd, options); + if (fd >> 0 !== fd || fd < 0) + throw new ERR_INVALID_FD(fd); + + const ctx = {}; + const tty = new TTY(fd, ctx); + if (ctx.code !== undefined) { + throw new ERR_TTY_INIT_FAILED(ctx); + } + + net.Socket.call(this, { + readableHighWaterMark: 0, + handle: tty, + manualStart: true, + ...options, + }); + + this.isRaw = false; + this.isTTY = true; +} + +ObjectSetPrototypeOf(ReadStream.prototype, net.Socket.prototype); +ObjectSetPrototypeOf(ReadStream, net.Socket); + +ReadStream.prototype.setRawMode = function(flag) { + flag = !!flag; + const err = this._handle?.setRawMode(flag); + if (err) { + this.emit('error', new ErrnoException(err, 'setRawMode')); + return this; + } + this.isRaw = flag; + return this; +}; + +function WriteStream(fd) { + if (!(this instanceof WriteStream)) + return new WriteStream(fd); + if (fd >> 0 !== fd || fd < 0) + throw new ERR_INVALID_FD(fd); + + const ctx = {}; + const tty = new TTY(fd, ctx); + if (ctx.code !== undefined) { + throw new ERR_TTY_INIT_FAILED(ctx); + } + + net.Socket.call(this, { + readableHighWaterMark: 0, + handle: tty, + manualStart: true, + }); + + // Prevents interleaved or dropped stdout/stderr output for terminals. + // As noted in the following reference, local TTYs tend to be quite fast and + // this behavior has become expected due historical functionality on OS X, + // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). 
+ // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 + this._handle.setBlocking(true); + + const winSize = [0, 0]; + const err = this._handle.getWindowSize(winSize); + if (!err) { + this.columns = winSize[0]; + this.rows = winSize[1]; + } +} + +ObjectSetPrototypeOf(WriteStream.prototype, net.Socket.prototype); +ObjectSetPrototypeOf(WriteStream, net.Socket); + +WriteStream.prototype.isTTY = true; + +WriteStream.prototype.getColorDepth = getColorDepth; + +WriteStream.prototype.hasColors = hasColors; + +WriteStream.prototype._refreshSize = function() { + const oldCols = this.columns; + const oldRows = this.rows; + const winSize = [0, 0]; + const err = this._handle.getWindowSize(winSize); + if (err) { + this.emit('error', new ErrnoException(err, 'getWindowSize')); + return; + } + const { 0: newCols, 1: newRows } = winSize; + if (oldCols !== newCols || oldRows !== newRows) { + this.columns = newCols; + this.rows = newRows; + this.emit('resize'); + } +}; + +// Backwards-compat +WriteStream.prototype.cursorTo = function(x, y, callback) { + if (readline === undefined) readline = require('readline'); + return readline.cursorTo(this, x, y, callback); +}; +WriteStream.prototype.moveCursor = function(dx, dy, callback) { + if (readline === undefined) readline = require('readline'); + return readline.moveCursor(this, dx, dy, callback); +}; +WriteStream.prototype.clearLine = function(dir, callback) { + if (readline === undefined) readline = require('readline'); + return readline.clearLine(this, dir, callback); +}; +WriteStream.prototype.clearScreenDown = function(callback) { + if (readline === undefined) readline = require('readline'); + return readline.clearScreenDown(this, callback); +}; +WriteStream.prototype.getWindowSize = function() { + return [this.columns, this.rows]; +}; + +module.exports = { isatty, ReadStream, WriteStream }; \ No newline at end of file diff --git a/node/url.js b/node/url.js new file mode 100644 index 00000000..55ad1f38 --- 
/dev/null +++ b/node/url.js @@ -0,0 +1,1024 @@ +'use strict'; + +const { + ArrayPrototypeJoin, + Boolean, + Int8Array, + ObjectAssign, + ObjectKeys, + StringPrototypeAt, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeReplaceAll, + StringPrototypeSlice, + decodeURIComponent, +} = primordials; + +const { URLPattern } = internalBinding('url_pattern'); +const { toASCII } = internalBinding('encoding_binding'); +const { encodeStr, hexTable } = require('internal/querystring'); +const querystring = require('querystring'); + +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_URL, + ERR_INVALID_ARG_VALUE, +} = require('internal/errors').codes; +const { + validateString, + validateObject, +} = require('internal/validators'); + +// This ensures setURLConstructor() is called before the native +// URL::ToObject() method is used. +const { spliceOne } = require('internal/util'); +const { isInsideNodeModules } = internalBinding('util'); + +// WHATWG URL implementation provided by internal/url +const { + URL, + URLSearchParams, + domainToASCII, + domainToUnicode, + fileURLToPath, + fileURLToPathBuffer, + pathToFileURL: _pathToFileURL, + urlToHttpOptions, + unsafeProtocol, + hostlessProtocol, + slashedProtocol, +} = require('internal/url'); + +const bindingUrl = internalBinding('url'); + +// Original url.parse() API + +function Url() { + this.protocol = null; + this.slashes = null; + this.auth = null; + this.host = null; + this.port = null; + this.hostname = null; + this.hash = null; + this.search = null; + this.query = null; + this.pathname = null; + this.path = null; + this.href = null; +} + +// Reference: RFC 3986, RFC 1808, RFC 2396 + +// define these here so at least they only have to be +// compiled once on the first module load. 
+const protocolPattern = /^[a-z0-9.+-]+:/i; +const portPattern = /:[0-9]*$/; +const hostPattern = /^\/\/[^@/]+@[^@/]+/; + +// Special case for a simple path URL +const simplePathPattern = /^(\/\/?(?!\/)[^?\s]*)(\?[^\s]*)?$/; + +const hostnameMaxLen = 255; +const { + CHAR_SPACE, + CHAR_TAB, + CHAR_CARRIAGE_RETURN, + CHAR_LINE_FEED, + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE, + CHAR_HASH, + CHAR_FORWARD_SLASH, + CHAR_LEFT_SQUARE_BRACKET, + CHAR_RIGHT_SQUARE_BRACKET, + CHAR_LEFT_ANGLE_BRACKET, + CHAR_RIGHT_ANGLE_BRACKET, + CHAR_LEFT_CURLY_BRACKET, + CHAR_RIGHT_CURLY_BRACKET, + CHAR_QUESTION_MARK, + CHAR_DOUBLE_QUOTE, + CHAR_SINGLE_QUOTE, + CHAR_PERCENT, + CHAR_SEMICOLON, + CHAR_BACKWARD_SLASH, + CHAR_CIRCUMFLEX_ACCENT, + CHAR_GRAVE_ACCENT, + CHAR_VERTICAL_LINE, + CHAR_AT, + CHAR_COLON, +} = require('internal/constants'); + +let urlParseWarned = false; + +function urlParse(url, parseQueryString, slashesDenoteHost) { + if (!urlParseWarned && !isInsideNodeModules(100, true)) { + urlParseWarned = true; + process.emitWarning( + '`url.parse()` behavior is not standardized and prone to ' + + 'errors that have security implications. Use the WHATWG URL API ' + + 'instead. CVEs are not issued for `url.parse()` vulnerabilities.', + 'DeprecationWarning', + 'DEP0169', + ); + } + + if (url instanceof Url) return url; + + const urlObject = new Url(); + urlObject.parse(url, parseQueryString, slashesDenoteHost); + return urlObject; +} + +function isIpv6Hostname(hostname) { + return ( + StringPrototypeCharCodeAt(hostname, 0) === CHAR_LEFT_SQUARE_BRACKET && + StringPrototypeCharCodeAt(hostname, hostname.length - 1) === + CHAR_RIGHT_SQUARE_BRACKET + ); +} + +// This prevents some common spoofing bugs due to our use of IDNA toASCII. 
For +// compatibility, the set of characters we use here is the *intersection* of +// "forbidden host code point" in the WHATWG URL Standard [1] and the +// characters in the host parsing loop in Url.prototype.parse, with the +// following additions: +// +// - ':' since this could cause a "protocol spoofing" bug +// - '@' since this could cause parts of the hostname to be confused with auth +// - '[' and ']' since this could cause a non-IPv6 hostname to be interpreted +// as IPv6 by isIpv6Hostname above +// +// [1]: https://url.spec.whatwg.org/#forbidden-host-code-point +const forbiddenHostChars = /[\0\t\n\r #%/:<>?@[\\\]^|]/; +// For IPv6, permit '[', ']', and ':'. +const forbiddenHostCharsIpv6 = /[\0\t\n\r #%/<>?@\\^|]/; + +Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { + validateString(url, 'url'); + + // Copy chrome, IE, opera backslash-handling behavior. + // Back slashes before the query string get converted to forward slashes + // See: https://code.google.com/p/chromium/issues/detail?id=25916 + let hasHash = false; + let hasAt = false; + let start = -1; + let end = -1; + let rest = ''; + let lastPos = 0; + for (let i = 0, inWs = false, split = false; i < url.length; ++i) { + const code = url.charCodeAt(i); + + // Find first and last non-whitespace characters for trimming + const isWs = code < 33 || + code === CHAR_NO_BREAK_SPACE || + code === CHAR_ZERO_WIDTH_NOBREAK_SPACE; + if (start === -1) { + if (isWs) + continue; + lastPos = start = i; + } else if (inWs) { + if (!isWs) { + end = -1; + inWs = false; + } + } else if (isWs) { + end = i; + inWs = true; + } + + // Only convert backslashes while we haven't seen a split character + if (!split) { + switch (code) { + case CHAR_AT: + hasAt = true; + break; + case CHAR_HASH: + hasHash = true; + // Fall through + case CHAR_QUESTION_MARK: + split = true; + break; + case CHAR_BACKWARD_SLASH: + if (i - lastPos > 0) + rest += url.slice(lastPos, i); + rest += '/'; + lastPos = i + 1; + 
break; + } + } else if (!hasHash && code === CHAR_HASH) { + hasHash = true; + } + } + + // Check if string was non-empty (including strings with only whitespace) + if (start !== -1) { + if (lastPos === start) { + // We didn't convert any backslashes + + if (end === -1) { + if (start === 0) + rest = url; + else + rest = url.slice(start); + } else { + rest = url.slice(start, end); + } + } else if (end === -1 && lastPos < url.length) { + // We converted some backslashes and have only part of the entire string + rest += url.slice(lastPos); + } else if (end !== -1 && lastPos < end) { + // We converted some backslashes and have only part of the entire string + rest += url.slice(lastPos, end); + } + } + + if (!slashesDenoteHost && !hasHash && !hasAt) { + // Try fast path regexp + const simplePath = simplePathPattern.exec(rest); + if (simplePath) { + this.path = rest; + this.href = rest; + this.pathname = simplePath[1]; + if (simplePath[2]) { + this.search = simplePath[2]; + if (parseQueryString) { + this.query = querystring.parse(this.search.slice(1)); + } else { + this.query = this.search.slice(1); + } + } else if (parseQueryString) { + this.search = null; + this.query = { __proto__: null }; + } + return this; + } + } + + let proto = protocolPattern.exec(rest); + let lowerProto; + if (proto) { + proto = proto[0]; + lowerProto = proto.toLowerCase(); + this.protocol = lowerProto; + rest = rest.slice(proto.length); + } + + // Figure out if it's got a host + // user@server is *always* interpreted as a hostname, and url + // resolution will treat //foo/bar as host=foo,path=bar because that's + // how the browser resolves relative URLs. 
+ let slashes; + if (slashesDenoteHost || proto || hostPattern.test(rest)) { + slashes = rest.charCodeAt(0) === CHAR_FORWARD_SLASH && + rest.charCodeAt(1) === CHAR_FORWARD_SLASH; + if (slashes && !(proto && hostlessProtocol.has(lowerProto))) { + rest = rest.slice(2); + this.slashes = true; + } + } + + if (!hostlessProtocol.has(lowerProto) && + (slashes || (proto && !slashedProtocol.has(proto)))) { + + // there's a hostname. + // the first instance of /, ?, ;, or # ends the host. + // + // If there is an @ in the hostname, then non-host chars *are* allowed + // to the left of the last @ sign, unless some host-ending character + // comes *before* the @-sign. + // URLs are obnoxious. + // + // ex: + // http://a@b@c/ => user:a@b host:c + // http://a@b?@c => user:a host:b path:/?@c + + let hostEnd = -1; + let atSign = -1; + let nonHost = -1; + for (let i = 0; i < rest.length; ++i) { + switch (rest.charCodeAt(i)) { + case CHAR_TAB: + case CHAR_LINE_FEED: + case CHAR_CARRIAGE_RETURN: + // WHATWG URL removes tabs, newlines, and carriage returns. Let's do that too. + rest = rest.slice(0, i) + rest.slice(i + 1); + i -= 1; + break; + case CHAR_SPACE: + case CHAR_DOUBLE_QUOTE: + case CHAR_PERCENT: + case CHAR_SINGLE_QUOTE: + case CHAR_SEMICOLON: + case CHAR_LEFT_ANGLE_BRACKET: + case CHAR_RIGHT_ANGLE_BRACKET: + case CHAR_BACKWARD_SLASH: + case CHAR_CIRCUMFLEX_ACCENT: + case CHAR_GRAVE_ACCENT: + case CHAR_LEFT_CURLY_BRACKET: + case CHAR_VERTICAL_LINE: + case CHAR_RIGHT_CURLY_BRACKET: + // Characters that are never ever allowed in a hostname from RFC 2396 + if (nonHost === -1) + nonHost = i; + break; + case CHAR_HASH: + case CHAR_FORWARD_SLASH: + case CHAR_QUESTION_MARK: + // Find the first instance of any host-ending characters + if (nonHost === -1) + nonHost = i; + hostEnd = i; + break; + case CHAR_AT: + // At this point, either we have an explicit point where the + // auth portion cannot go past, or the last @ char is the decider. 
+ atSign = i; + nonHost = -1; + break; + } + if (hostEnd !== -1) + break; + } + start = 0; + if (atSign !== -1) { + this.auth = decodeURIComponent(rest.slice(0, atSign)); + start = atSign + 1; + } + if (nonHost === -1) { + this.host = rest.slice(start); + rest = ''; + } else { + this.host = rest.slice(start, nonHost); + rest = rest.slice(nonHost); + } + + // pull out port. + this.parseHost(); + + // We've indicated that there is a hostname, + // so even if it's empty, it has to be present. + if (typeof this.hostname !== 'string') + this.hostname = ''; + + const hostname = this.hostname; + + // If hostname begins with [ and ends with ] + // assume that it's an IPv6 address. + const ipv6Hostname = isIpv6Hostname(hostname); + + // validate a little. + if (!ipv6Hostname) { + rest = getHostname(this, rest, hostname, url); + } + + if (this.hostname.length > hostnameMaxLen) { + this.hostname = ''; + } else { + // Hostnames are always lower case. + this.hostname = this.hostname.toLowerCase(); + } + + if (this.hostname !== '') { + if (ipv6Hostname) { + if (forbiddenHostCharsIpv6.test(this.hostname)) { + throw new ERR_INVALID_URL(url); + } + } else { + // IDNA Support: Returns a punycoded representation of "domain". + // It only converts parts of the domain name that + // have non-ASCII characters, i.e. it doesn't matter if + // you call it with a domain that already is ASCII-only. + this.hostname = toASCII(this.hostname); + + // Prevent two potential routes of hostname spoofing. + // 1. If this.hostname is empty, it must have become empty due to toASCII + // since we checked this.hostname above. + // 2. If any of forbiddenHostChars appears in this.hostname, it must have + // also gotten in due to toASCII. This is since getHostname would have + // filtered them out otherwise. + // Rather than trying to correct this by moving the non-host part into + // the pathname as we've done in getHostname, throw an exception to + // convey the severity of this issue. 
+ if (this.hostname === '' || forbiddenHostChars.test(this.hostname)) { + throw new ERR_INVALID_URL(url); + } + } + } + + const p = this.port ? ':' + this.port : ''; + const h = this.hostname || ''; + this.host = h + p; + + // strip [ and ] from the hostname + // the host field still retains them, though + if (ipv6Hostname) { + this.hostname = this.hostname.slice(1, -1); + if (rest[0] !== '/') { + rest = '/' + rest; + } + } + } + + // Now rest is set to the post-host stuff. + // Chop off any delim chars. + if (!unsafeProtocol.has(lowerProto)) { + // First, make 100% sure that any "autoEscape" chars get + // escaped, even if encodeURIComponent doesn't think they + // need to be. + rest = autoEscapeStr(rest); + } + + let questionIdx = -1; + let hashIdx = -1; + for (let i = 0; i < rest.length; ++i) { + const code = rest.charCodeAt(i); + if (code === CHAR_HASH) { + this.hash = rest.slice(i); + hashIdx = i; + break; + } else if (code === CHAR_QUESTION_MARK && questionIdx === -1) { + questionIdx = i; + } + } + + if (questionIdx !== -1) { + if (hashIdx === -1) { + this.search = rest.slice(questionIdx); + this.query = rest.slice(questionIdx + 1); + } else { + this.search = rest.slice(questionIdx, hashIdx); + this.query = rest.slice(questionIdx + 1, hashIdx); + } + if (parseQueryString) { + this.query = querystring.parse(this.query); + } + } else if (parseQueryString) { + // No query string, but parseQueryString still requested + this.search = null; + this.query = { __proto__: null }; + } + + const useQuestionIdx = + questionIdx !== -1 && (hashIdx === -1 || questionIdx < hashIdx); + const firstIdx = useQuestionIdx ? 
questionIdx : hashIdx; + if (firstIdx === -1) { + if (rest.length > 0) + this.pathname = rest; + } else if (firstIdx > 0) { + this.pathname = rest.slice(0, firstIdx); + } + if (slashedProtocol.has(lowerProto) && + this.hostname && !this.pathname) { + this.pathname = '/'; + } + + // To support http.request + if (this.pathname || this.search) { + const p = this.pathname || ''; + const s = this.search || ''; + this.path = p + s; + } + + // Finally, reconstruct the href based on what has been validated. + this.href = this.format(); + return this; +}; + +function getHostname(self, rest, hostname, url) { + for (let i = 0; i < hostname.length; ++i) { + const code = hostname.charCodeAt(i); + const isValid = (code !== CHAR_FORWARD_SLASH && + code !== CHAR_BACKWARD_SLASH && + code !== CHAR_HASH && + code !== CHAR_QUESTION_MARK && + code !== CHAR_COLON); + + if (!isValid) { + // If leftover starts with :, then it represents an invalid port. + if (code === CHAR_COLON) { + throw new ERR_INVALID_ARG_VALUE('url', 'Invalid port in url', url); + } + self.hostname = hostname.slice(0, i); + return `/${hostname.slice(i)}${rest}`; + } + } + return rest; +} + +// Escaped characters. Use empty strings to fill up unused entries. 
+// Using Array is faster than Object/Map +const escapedCodes = [ + /* 0 - 9 */ '', '', '', '', '', '', '', '', '', '%09', + /* 10 - 19 */ '%0A', '', '', '%0D', '', '', '', '', '', '', + /* 20 - 29 */ '', '', '', '', '', '', '', '', '', '', + /* 30 - 39 */ '', '', '%20', '', '%22', '', '', '', '', '%27', + /* 40 - 49 */ '', '', '', '', '', '', '', '', '', '', + /* 50 - 59 */ '', '', '', '', '', '', '', '', '', '', + /* 60 - 69 */ '%3C', '', '%3E', '', '', '', '', '', '', '', + /* 70 - 79 */ '', '', '', '', '', '', '', '', '', '', + /* 80 - 89 */ '', '', '', '', '', '', '', '', '', '', + /* 90 - 99 */ '', '', '%5C', '', '%5E', '', '%60', '', '', '', + /* 100 - 109 */ '', '', '', '', '', '', '', '', '', '', + /* 110 - 119 */ '', '', '', '', '', '', '', '', '', '', + /* 120 - 125 */ '', '', '', '%7B', '%7C', '%7D', +]; + +// Automatically escape all delimiters and unwise characters from RFC 2396. +// Also escape single quotes in case of an XSS attack. +// Return the escaped string. +function autoEscapeStr(rest) { + let escaped = ''; + let lastEscapedPos = 0; + for (let i = 0; i < rest.length; ++i) { + // `escaped` contains substring up to the last escaped character. + const escapedChar = escapedCodes[rest.charCodeAt(i)]; + if (escapedChar) { + // Concat if there are ordinary characters in the middle. + if (i > lastEscapedPos) + escaped += rest.slice(lastEscapedPos, i); + escaped += escapedChar; + lastEscapedPos = i + 1; + } + } + if (lastEscapedPos === 0) // Nothing has been escaped. + return rest; + + // There are ordinary characters at the end. + if (lastEscapedPos < rest.length) + escaped += rest.slice(lastEscapedPos); + + return escaped; +} + +// Format a parsed object into a url string +function urlFormat(urlObject, options) { + // Ensure it's an object, and not a string url. + // If it's an object, this is a no-op. + // this way, you can call urlParse() on strings + // to clean up potentially wonky urls. 
+ if (typeof urlObject === 'string') { + urlObject = urlParse(urlObject); + } else if (typeof urlObject !== 'object' || urlObject === null) { + throw new ERR_INVALID_ARG_TYPE('urlObject', + ['Object', 'string'], urlObject); + } else if (urlObject instanceof URL) { + let fragment = true; + let unicode = false; + let search = true; + let auth = true; + + if (options) { + validateObject(options, 'options'); + + if (options.fragment != null) { + fragment = Boolean(options.fragment); + } + + if (options.unicode != null) { + unicode = Boolean(options.unicode); + } + + if (options.search != null) { + search = Boolean(options.search); + } + + if (options.auth != null) { + auth = Boolean(options.auth); + } + } + + return bindingUrl.format(urlObject.href, fragment, unicode, search, auth); + } + + return Url.prototype.format.call(urlObject); +} + +// These characters do not need escaping: +// ! - . _ ~ +// ' ( ) * : +// digits +// alpha (uppercase) +// alpha (lowercase) +const noEscapeAuth = new Int8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x00 - 0x0F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x10 - 0x1F + 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, // 0x20 - 0x2F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, // 0x30 - 0x3F + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x40 - 0x4F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, // 0x50 - 0x5F + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x60 - 0x6F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, // 0x70 - 0x7F +]); + +Url.prototype.format = function format() { + let auth = this.auth || ''; + if (auth) { + auth = encodeStr(auth, noEscapeAuth, hexTable); + auth += '@'; + } + + let protocol = this.protocol || ''; + if (protocol && StringPrototypeCharCodeAt(protocol, protocol.length - 1) !== 58 /* : */) { + protocol += ':'; + } + + let pathname = this.pathname || ''; + let hash = this.hash || ''; + let host = ''; + let query = ''; + + if (this.host) { + host = auth + 
this.host; + } else if (this.hostname) { + host = auth + ( + StringPrototypeIndexOf(this.hostname, ':') !== -1 && !isIpv6Hostname(this.hostname) ? + '[' + this.hostname + ']' : + this.hostname + ); + if (this.port) { + host += ':' + this.port; + } + } + + if (this.query !== null && typeof this.query === 'object') { + query = querystring.stringify(this.query); + } + let search = this.search || (query && ('?' + query)) || ''; + + if (StringPrototypeIndexOf(pathname, '#') !== -1 || StringPrototypeIndexOf(pathname, '?') !== -1) { + let newPathname = ''; + let lastPos = 0; + const len = pathname.length; + for (let i = 0; i < len; i++) { + const code = StringPrototypeCharCodeAt(pathname, i); + if (code === CHAR_HASH || code === CHAR_QUESTION_MARK) { + if (i > lastPos) { + newPathname += StringPrototypeSlice(pathname, lastPos, i); + } + newPathname += (code === CHAR_HASH ? '%23' : '%3F'); + lastPos = i + 1; + } + } + if (lastPos < len) { + newPathname += StringPrototypeSlice(pathname, lastPos); + } + pathname = newPathname; + } + + // Only the slashedProtocols get the //. Not mailto:, xmpp:, etc. + // unless they had them to begin with. + if (this.slashes || slashedProtocol.has(protocol)) { + if (this.slashes || host) { + if (pathname && StringPrototypeCharCodeAt(pathname, 0) !== CHAR_FORWARD_SLASH) + pathname = '/' + pathname; + host = '//' + host; + } else if (protocol.length >= 4 && + StringPrototypeCharCodeAt(protocol, 0) === 102/* f */ && + StringPrototypeCharCodeAt(protocol, 1) === 105/* i */ && + StringPrototypeCharCodeAt(protocol, 2) === 108/* l */ && + StringPrototypeCharCodeAt(protocol, 3) === 101/* e */) { + host = '//'; + } + } + + // Escape '#' in search. + if (StringPrototypeIndexOf(search, '#') !== -1) { + search = StringPrototypeReplaceAll(search, '#', '%23'); + } + + if (hash && StringPrototypeCharCodeAt(hash, 0) !== CHAR_HASH) { + hash = '#' + hash; + } + if (search && StringPrototypeCharCodeAt(search, 0) !== CHAR_QUESTION_MARK) { + search = '?' 
+ search; + } + + return protocol + host + pathname + search + hash; +}; + +function urlResolve(source, relative) { + return urlParse(source, false, true).resolve(relative); +} + +Url.prototype.resolve = function resolve(relative) { + return this.resolveObject(urlParse(relative, false, true)).format(); +}; + +function urlResolveObject(source, relative) { + if (!source) return relative; + return urlParse(source, false, true).resolveObject(relative); +} + +Url.prototype.resolveObject = function resolveObject(relative) { + if (typeof relative === 'string') { + const rel = new Url(); + rel.parse(relative, false, true); + relative = rel; + } + + const result = new Url(); + ObjectAssign(result, this); + + // Hash is always overridden, no matter what. + // even href="" will remove it. + result.hash = relative.hash; + + // If the relative url is empty, then there's nothing left to do here. + if (relative.href === '') { + result.href = result.format(); + return result; + } + + // Hrefs like //foo/bar always cut to the protocol. + if (relative.slashes && !relative.protocol) { + // Take everything except the protocol from relative + const relativeWithoutProtocol = ObjectKeys(relative).reduce((acc, key) => { + if (key !== 'protocol') { + acc[key] = relative[key]; + } + return acc; + }, {}); + ObjectAssign(result, relativeWithoutProtocol); + + // urlParse appends trailing / to urls like http://www.example.com + if (slashedProtocol.has(result.protocol) && + result.hostname && !result.pathname) { + result.path = result.pathname = '/'; + } + + result.href = result.format(); + return result; + } + + if (relative.protocol && relative.protocol !== result.protocol) { + // If it's a known url protocol, then changing + // the protocol does weird things + // first, if it's not file:, then we MUST have a host, + // and if there was a path + // to begin with, then we MUST have a path. + // if it is file:, then the host is dropped, + // because that's known to be hostless. 
+ // anything else is assumed to be absolute. + if (!slashedProtocol.has(relative.protocol)) { + ObjectAssign(result, relative); + result.href = result.format(); + return result; + } + + result.protocol = relative.protocol; + if (!relative.host && + !/^file:?$/.test(relative.protocol) && + !hostlessProtocol.has(relative.protocol)) { + const relPath = (relative.pathname || '').split('/'); + while (relPath.length && !(relative.host = relPath.shift())); + relative.host ||= ''; + relative.hostname ||= ''; + if (relPath[0] !== '') relPath.unshift(''); + if (relPath.length < 2) relPath.unshift(''); + result.pathname = relPath.join('/'); + } else { + result.pathname = relative.pathname; + } + result.search = relative.search; + result.query = relative.query; + result.host = relative.host || ''; + result.auth = relative.auth; + result.hostname = relative.hostname || relative.host; + result.port = relative.port; + // To support http.request + if (result.pathname || result.search) { + const p = result.pathname || ''; + const s = result.search || ''; + result.path = p + s; + } + result.slashes ||= relative.slashes; + result.href = result.format(); + return result; + } + + const isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'); + const isRelAbs = ( + relative.host || (relative.pathname && relative.pathname.charAt(0) === '/') + ); + let mustEndAbs = (isRelAbs || isSourceAbs || + (result.host && relative.pathname)); + const removeAllDots = mustEndAbs; + let srcPath = (result.pathname && result.pathname.split('/')) || []; + const relPath = (relative.pathname && relative.pathname.split('/')) || []; + const noLeadingSlashes = result.protocol && + !slashedProtocol.has(result.protocol); + + // If the url is a non-slashed url, then relative + // links like ../.. should be able + // to crawl up to the hostname, as well. This is strange. + // result.protocol has already been set by now. + // Later on, put the first path part into the host field. 
+ if (noLeadingSlashes) { + result.hostname = ''; + result.port = null; + if (result.host) { + if (srcPath[0] === '') srcPath[0] = result.host; + else srcPath.unshift(result.host); + } + result.host = ''; + if (relative.protocol) { + relative.hostname = null; + relative.port = null; + result.auth = null; + if (relative.host) { + if (relPath[0] === '') relPath[0] = relative.host; + else relPath.unshift(relative.host); + } + relative.host = null; + } + mustEndAbs &&= (relPath[0] === '' || srcPath[0] === ''); + } + + if (isRelAbs) { + // it's absolute. + if (relative.host || relative.host === '') { + if (result.host !== relative.host) result.auth = null; + result.host = relative.host; + result.port = relative.port; + } + if (relative.hostname || relative.hostname === '') { + if (result.hostname !== relative.hostname) result.auth = null; + result.hostname = relative.hostname; + } + result.search = relative.search; + result.query = relative.query; + srcPath = relPath; + // Fall through to the dot-handling below. + } else if (relPath.length) { + // it's relative + // throw away the existing file, and take the new path instead. + srcPath ||= []; + srcPath.pop(); + srcPath = srcPath.concat(relPath); + result.search = relative.search; + result.query = relative.query; + } else if (relative.search !== null && relative.search !== undefined) { + // Just pull out the search. + // like href='?foo'. + // Put this after the other two cases because it simplifies the booleans + if (noLeadingSlashes) { + result.hostname = result.host = srcPath.shift(); + // Occasionally the auth can get stuck only in host. 
+ // This especially happens in cases like + // url.resolveObject('mailto:local1@domain1', 'local2@domain2') + const authInHost = + result.host && result.host.indexOf('@') > 0 && result.host.split('@'); + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + result.search = relative.search; + result.query = relative.query; + // To support http.request + if (result.pathname !== null || result.search !== null) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.href = result.format(); + return result; + } + + if (!srcPath.length) { + // No path at all. All other things were already handled above. + result.pathname = null; + // To support http.request + if (result.search) { + result.path = '/' + result.search; + } else { + result.path = null; + } + result.href = result.format(); + return result; + } + + // If a url ENDs in . or .., then it must get a trailing slash. + // however, if it ends in anything else non-slashy, + // then it must NOT get a trailing slash. + let last = srcPath[srcPath.length - 1]; + const hasTrailingSlash = ( + ((result.host || relative.host || srcPath.length > 1) && + (last === '.' 
|| last === '..')) || last === ''); + + // Strip single dots, resolve double dots to parent dir + // if the path tries to go above the root, `up` ends up > 0 + let up = 0; + for (let i = srcPath.length - 1; i >= 0; i--) { + last = srcPath[i]; + if (last === '.') { + spliceOne(srcPath, i); + } else if (last === '..') { + spliceOne(srcPath, i); + up++; + } else if (up) { + spliceOne(srcPath, i); + up--; + } + } + + // If the path is allowed to go above the root, restore leading ..s + if (!mustEndAbs && !removeAllDots) { + while (up--) { + srcPath.unshift('..'); + } + } + + if (mustEndAbs && srcPath[0] !== '' && + (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { + srcPath.unshift(''); + } + + if (hasTrailingSlash && StringPrototypeAt(ArrayPrototypeJoin(srcPath, '/'), -1) !== '/') { + srcPath.push(''); + } + + const isAbsolute = srcPath[0] === '' || + (srcPath[0] && srcPath[0].charAt(0) === '/'); + + // put the host back + if (noLeadingSlashes) { + result.hostname = + result.host = isAbsolute ? '' : srcPath.length ? srcPath.shift() : ''; + // Occasionally the auth can get stuck only in host. + // This especially happens in cases like + // url.resolveObject('mailto:local1@domain1', 'local2@domain2') + const authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + + mustEndAbs ||= (result.host && srcPath.length); + + if (mustEndAbs && !isAbsolute) { + srcPath.unshift(''); + } + + if (!srcPath.length) { + result.pathname = null; + result.path = null; + } else { + result.pathname = srcPath.join('/'); + } + + // To support request.http + if (result.pathname !== null || result.search !== null) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? 
result.search : ''); + } + result.auth = relative.auth || result.auth; + result.slashes ||= relative.slashes; + result.href = result.format(); + return result; +}; + +Url.prototype.parseHost = function parseHost() { + let host = this.host; + let port = portPattern.exec(host); + if (port) { + port = port[0]; + if (port !== ':') { + this.port = port.slice(1); + } + host = host.slice(0, host.length - port.length); + } + if (host) this.hostname = host; +}; + +// When used internally, we are not obligated to associate TypeError with +// this function, so non-strings can be rejected by underlying implementation. +// Public API has to validate input and throw appropriate error. +function pathToFileURL(path, options) { + validateString(path, 'path'); + + return _pathToFileURL(path, options); +} + +module.exports = { + // Original API + Url, + parse: urlParse, + resolve: urlResolve, + resolveObject: urlResolveObject, + format: urlFormat, + + // WHATWG API + URL, + URLPattern, + URLSearchParams, + domainToASCII, + domainToUnicode, + + // Utilities + pathToFileURL, + fileURLToPath, + fileURLToPathBuffer, + urlToHttpOptions, +}; \ No newline at end of file diff --git a/node/util.js b/node/util.js new file mode 100644 index 00000000..d458b0a2 --- /dev/null +++ b/node/util.js @@ -0,0 +1,521 @@ +'use strict'; + +const { + ArrayIsArray, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeReduce, + Error, + ErrorCaptureStackTrace, + FunctionPrototypeBind, + NumberIsSafeInteger, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptors, + ObjectKeys, + ObjectSetPrototypeOf, + ObjectValues, + ReflectApply, + RegExp, + RegExpPrototypeSymbolReplace, + StringPrototypeToWellFormed, +} = primordials; + +const { + ErrnoException, + ExceptionWithHostPort, + codes: { + ERR_FALSY_VALUE_REJECTION, + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + }, + isErrorStackTraceLimitWritable, +} = require('internal/errors'); +const { + format, + formatWithOptions, + inspect, 
+ stripVTControlCharacters, +} = require('internal/util/inspect'); +const { debuglog } = require('internal/util/debuglog'); +const { + validateBoolean, + validateFunction, + validateNumber, + validateString, + validateOneOf, + validateObject, +} = require('internal/validators'); +const { + isReadableStream, + isWritableStream, + isNodeStream, +} = require('internal/streams/utils'); +const types = require('internal/util/types'); + +let utilColors; +function lazyUtilColors() { + utilColors ??= require('internal/util/colors'); + return utilColors; +} +const { getOptionValue } = require('internal/options'); + +const binding = internalBinding('util'); + +const { + deprecate, + getLazy, + getSystemErrorMap, + getSystemErrorName: internalErrorName, + getSystemErrorMessage: internalErrorMessage, + promisify, + defineLazyProperties, +} = require('internal/util'); + +let abortController; + +function lazyAbortController() { + abortController ??= require('internal/abort_controller'); + return abortController; +} + +let internalDeepEqual; + +/** + * @param {string} [code] + * @returns {string} + */ +function escapeStyleCode(code) { + if (code === undefined) return ''; + return `\u001b[${code}m`; +} + +/** + * @param {string | string[]} format + * @param {string} text + * @param {object} [options] + * @param {boolean} [options.validateStream] - Whether to validate the stream. + * @param {Stream} [options.stream] - The stream used for validation. 
+ * @returns {string} + */ +function styleText(format, text, { validateStream = true, stream = process.stdout } = {}) { + validateString(text, 'text'); + validateBoolean(validateStream, 'options.validateStream'); + + let skipColorize; + if (validateStream) { + if ( + !isReadableStream(stream) && + !isWritableStream(stream) && + !isNodeStream(stream) + ) { + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream); + } + + // If the stream is falsy or should not be colorized, set skipColorize to true + skipColorize = !lazyUtilColors().shouldColorize(stream); + } + + // If the format is not an array, convert it to an array + const formatArray = ArrayIsArray(format) ? format : [format]; + + const codes = []; + for (const key of formatArray) { + if (key === 'none') continue; + const formatCodes = inspect.colors[key]; + // If the format is not a valid style, throw an error + if (formatCodes == null) { + validateOneOf(key, 'format', ObjectKeys(inspect.colors)); + } + if (skipColorize) continue; + ArrayPrototypePush(codes, formatCodes); + } + + if (skipColorize) { + return text; + } + + // Build opening codes + let openCodes = ''; + for (let i = 0; i < codes.length; i++) { + openCodes += escapeStyleCode(codes[i][0]); + } + + // Process the text to handle nested styles + let processedText; + if (codes.length > 0) { + processedText = ArrayPrototypeReduce( + codes, + (text, code) => RegExpPrototypeSymbolReplace( + // Find the reset code + new RegExp(`\\u001b\\[${code[1]}m`, 'g'), + text, + (match, offset) => { + // Check if there's more content after this reset + if (offset + match.length < text.length) { + if ( + code[0] === inspect.colors.dim[0] || + code[0] === inspect.colors.bold[0] + ) { + // Dim and bold are not mutually exclusive, so we need to reapply + return `${match}${escapeStyleCode(code[0])}`; + } + return escapeStyleCode(code[0]); + } + return match; + }, + ), + text, + ); + } else { + processedText = text; + } + + // 
Build closing codes in reverse order + let closeCodes = ''; + for (let i = codes.length - 1; i >= 0; i--) { + closeCodes += escapeStyleCode(codes[i][1]); + } + + return `${openCodes}${processedText}${closeCodes}`; +} + +/** + * Inherit the prototype methods from one constructor into another. + * + * The Function.prototype.inherits from lang.js rewritten as a standalone + * function (not on Function.prototype). NOTE: If this file is to be loaded + * during bootstrapping this function needs to be rewritten using some native + * functions as prototype setup using normal JavaScript does not work as + * expected during bootstrapping (see mirror.js in r114903). + * @param {Function} ctor Constructor function which needs to inherit the + * prototype. + * @param {Function} superCtor Constructor function to inherit prototype from. + * @throws {TypeError} Will error if either constructor is null, or if + * the super constructor lacks a prototype. + */ +function inherits(ctor, superCtor) { + + if (ctor === undefined || ctor === null) + throw new ERR_INVALID_ARG_TYPE('ctor', 'Function', ctor); + + if (superCtor === undefined || superCtor === null) + throw new ERR_INVALID_ARG_TYPE('superCtor', 'Function', superCtor); + + if (superCtor.prototype === undefined) { + throw new ERR_INVALID_ARG_TYPE('superCtor.prototype', + 'Object', superCtor.prototype); + } + ObjectDefineProperty(ctor, 'super_', { + __proto__: null, + value: superCtor, + writable: true, + configurable: true, + }); + ObjectSetPrototypeOf(ctor.prototype, superCtor.prototype); +} + +/** + * @deprecated since v6.0.0 + * @template T + * @template S + * @param {T} target + * @param {S} source + * @returns {(T & S) | null} + */ +function _extend(target, source) { + // Don't do anything if source isn't an object + if (source === null || typeof source !== 'object') return target; + + const keys = ObjectKeys(source); + let i = keys.length; + while (i--) { + target[keys[i]] = source[keys[i]]; + } + return target; +} + +const 
callbackifyOnRejected = (reason, cb) => { + // `!reason` guard inspired by bluebird (Ref: https://goo.gl/t5IS6M). + // Because `null` is a special error value in callbacks which means "no error + // occurred", we error-wrap so the callback consumer can distinguish between + // "the promise rejected with null" or "the promise fulfilled with undefined". + if (!reason) { + reason = new ERR_FALSY_VALUE_REJECTION.HideStackFramesError(reason); + ErrorCaptureStackTrace(reason, callbackifyOnRejected); + } + return cb(reason); +}; + +/** + * Converts a Promise-returning function to callback style + * @param {Function} original + * @returns {Function} + */ +function callbackify(original) { + validateFunction(original, 'original'); + + // We DO NOT return the promise as it gives the user a false sense that + // the promise is actually somehow related to the callback's execution + // and that the callback throwing will reject the promise. + function callbackified(...args) { + const maybeCb = ArrayPrototypePop(args); + validateFunction(maybeCb, 'last argument'); + const cb = FunctionPrototypeBind(maybeCb, this); + // In true node style we process the callback on `nextTick` with all the + // implications (stack, `uncaughtException`, `async_hooks`) + ReflectApply(original, this, args) + .then((ret) => process.nextTick(cb, null, ret), + (rej) => process.nextTick(callbackifyOnRejected, rej, cb)); + } + + const descriptors = ObjectGetOwnPropertyDescriptors(original); + // It is possible to manipulate a functions `length` or `name` property. This + // guards against the manipulation. + if (typeof descriptors.length.value === 'number') { + descriptors.length.value++; + } + if (typeof descriptors.name.value === 'string') { + descriptors.name.value += 'Callbackified'; + } + const propertiesValues = ObjectValues(descriptors); + for (let i = 0; i < propertiesValues.length; i++) { + // We want to use null-prototype objects to not rely on globally mutable + // %Object.prototype%. 
+ ObjectSetPrototypeOf(propertiesValues[i], null); + } + ObjectDefineProperties(callbackified, descriptors); + return callbackified; +} + +/** + * @param {number} err + * @returns {string} + */ +function getSystemErrorMessage(err) { + validateNumber(err, 'err'); + if (err >= 0 || !NumberIsSafeInteger(err)) { + throw new ERR_OUT_OF_RANGE('err', 'a negative integer', err); + } + return internalErrorMessage(err); +} + +/** + * @param {number} err + * @returns {string} + */ +function getSystemErrorName(err) { + validateNumber(err, 'err'); + if (err >= 0 || !NumberIsSafeInteger(err)) { + throw new ERR_OUT_OF_RANGE('err', 'a negative integer', err); + } + return internalErrorName(err); +} + +function _errnoException(...args) { + if (isErrorStackTraceLimitWritable()) { + const limit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + const e = new ErrnoException(...args); + Error.stackTraceLimit = limit; + ErrorCaptureStackTrace(e, _exceptionWithHostPort); + return e; + } + return new ErrnoException(...args); +} + +function _exceptionWithHostPort(...args) { + if (isErrorStackTraceLimitWritable()) { + const limit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + const e = new ExceptionWithHostPort(...args); + Error.stackTraceLimit = limit; + ErrorCaptureStackTrace(e, _exceptionWithHostPort); + return e; + } + return new ExceptionWithHostPort(...args); +} + +/** + * Parses the content of a `.env` file. 
+ * @param {string} content + * @returns {Record} + */ +function parseEnv(content) { + validateString(content, 'content'); + return binding.parseEnv(content); +} + +const lazySourceMap = getLazy(() => require('internal/source_map/source_map_cache')); + +/** + * @typedef {object} CallSite // The call site + * @property {string} scriptName // The name of the resource that contains the + * script for the function for this StackFrame + * @property {string} functionName // The name of the function associated with this stack frame + * @property {number} lineNumber // The number, 1-based, of the line for the associate function call + * @property {number} columnNumber // The 1-based column offset on the line for the associated function call + */ + +/** + * @param {CallSite} callSite // The call site object to reconstruct from source map + * @returns {CallSite | undefined} // The reconstructed call site object + */ +function reconstructCallSite(callSite) { + const { scriptName, lineNumber, columnNumber } = callSite; + const sourceMap = lazySourceMap().findSourceMap(scriptName); + if (!sourceMap) return; + const entry = sourceMap.findEntry(lineNumber - 1, columnNumber - 1); + if (!entry?.originalSource) return; + return { + __proto__: null, + // If the name is not found, it is an empty string to match the behavior of `util.getCallSite()` + functionName: entry.name ?? '', + scriptName: entry.originalSource, + lineNumber: entry.originalLine + 1, + column: entry.originalColumn + 1, + columnNumber: entry.originalColumn + 1, + }; +} + +/** + * + * The call site array to map + * @param {CallSite[]} callSites + * Array of objects with the reconstructed call site + * @returns {CallSite[]} + */ +function mapCallSite(callSites) { + const result = []; + for (let i = 0; i < callSites.length; ++i) { + const callSite = callSites[i]; + const found = reconstructCallSite(callSite); + ArrayPrototypePush(result, found ?? 
callSite); + } + return result; +} + +/** + * @typedef {object} CallSiteOptions // The call site options + * @property {boolean} sourceMap // Enable source map support + */ + +/** + * Returns the callSite + * @param {number} frameCount + * @param {CallSiteOptions} options + * @returns {CallSite[]} + */ +function getCallSites(frameCount = 10, options) { + // If options is not provided check if frameCount is an object + if (options === undefined) { + if (typeof frameCount === 'object') { + // If frameCount is an object, it is the options object + options = frameCount; + validateObject(options, 'options'); + if (options.sourceMap !== undefined) { + validateBoolean(options.sourceMap, 'options.sourceMap'); + } + frameCount = 10; + } else { + // If options is not provided, set it to an empty object + options = {}; + }; + } else { + // If options is provided, validate it + validateObject(options, 'options'); + if (options.sourceMap !== undefined) { + validateBoolean(options.sourceMap, 'options.sourceMap'); + } + } + + // Using kDefaultMaxCallStackSizeToCapture as reference + validateNumber(frameCount, 'frameCount', 1, 200); + // If options.sourceMaps is true or if sourceMaps are enabled but the option.sourceMaps is not set explictly to false + if (options.sourceMap === true || (getOptionValue('--enable-source-maps') && options.sourceMap !== false)) { + return mapCallSite(binding.getCallSites(frameCount)); + } + return binding.getCallSites(frameCount); +}; + +// Keep the `exports =` so that various functions can still be monkeypatched +module.exports = { + _errnoException, + _exceptionWithHostPort, + _extend: deprecate(_extend, + 'The `util._extend` API is deprecated. Please use Object.assign() instead.', + 'DEP0060'), + callbackify, + debug: debuglog, + debuglog, + deprecate, + format, + styleText, + formatWithOptions, + // Deprecated getCallSite. + // This API can be removed in next semver-minor release. 
+ getCallSite: deprecate(getCallSites, + 'The `util.getCallSite` API has been renamed to `util.getCallSites()`.', + 'ExperimentalWarning'), + getCallSites, + getSystemErrorMap, + getSystemErrorName, + getSystemErrorMessage, + inherits, + inspect, + isArray: deprecate(ArrayIsArray, + 'The `util.isArray` API is deprecated. Please use `Array.isArray()` instead.', + 'DEP0044'), + isDeepStrictEqual(a, b, skipPrototype) { + if (internalDeepEqual === undefined) { + internalDeepEqual = require('internal/util/comparisons').isDeepStrictEqual; + } + return internalDeepEqual(a, b, skipPrototype); + }, + promisify, + stripVTControlCharacters, + toUSVString(input) { + return StringPrototypeToWellFormed(`${input}`); + }, + get transferableAbortSignal() { + return lazyAbortController().transferableAbortSignal; + }, + get transferableAbortController() { + return lazyAbortController().transferableAbortController; + }, + get aborted() { + return lazyAbortController().aborted; + }, + types, + parseEnv, +}; + +defineLazyProperties( + module.exports, + 'internal/util/parse_args/parse_args', + ['parseArgs'], +); + +defineLazyProperties( + module.exports, + 'internal/encoding', + ['TextDecoder', 'TextEncoder'], +); + +defineLazyProperties( + module.exports, + 'internal/mime', + ['MIMEType', 'MIMEParams'], +); + +defineLazyProperties( + module.exports, + 'internal/util/diff', + ['diff'], +); + +defineLazyProperties( + module.exports, + 'internal/util/trace_sigint', + ['setTraceSigInt'], +); \ No newline at end of file diff --git a/node/v8.js b/node/v8.js new file mode 100644 index 00000000..a28391e8 --- /dev/null +++ b/node/v8.js @@ -0,0 +1,467 @@ +'use strict'; + +const { + Array, + BigInt64Array, + BigUint64Array, + DataView, + Error, + Float32Array, + Float64Array, + Int16Array, + Int32Array, + Int8Array, + JSONParse, + ObjectPrototypeToString, + Uint16Array, + Uint32Array, + Uint8Array, + Uint8ClampedArray, + globalThis: { + Float16Array, + }, +} = primordials; + +const { Buffer } = 
require('buffer'); +const { + validateString, + validateUint32, + validateOneOf, +} = require('internal/validators'); +const { + Serializer, + Deserializer, +} = internalBinding('serdes'); +const { + namespace: startupSnapshot, +} = require('internal/v8/startup_snapshot'); + +let profiler = {}; +if (internalBinding('config').hasInspector) { + profiler = internalBinding('profiler'); +} + +const assert = require('internal/assert'); +const { inspect } = require('internal/util/inspect'); +const { FastBuffer } = require('internal/buffer'); +const { getValidatedPath } = require('internal/fs/utils'); +const { + createHeapSnapshotStream, + triggerHeapSnapshot, +} = internalBinding('heap_utils'); +const { + HeapSnapshotStream, + getHeapSnapshotOptions, + queryObjects, +} = require('internal/heap_utils'); +const promiseHooks = require('internal/promise_hooks'); +const { getOptionValue } = require('internal/options'); + +/** + * Generates a snapshot of the current V8 heap + * and writes it to a JSON file. + * @param {string} [filename] + * @param {{ + * exposeInternals?: boolean, + * exposeNumericValues?: boolean + * }} [options] + * @returns {string} + */ +function writeHeapSnapshot(filename, options) { + if (filename !== undefined) { + filename = getValidatedPath(filename); + } + const optionArray = getHeapSnapshotOptions(options); + return triggerHeapSnapshot(filename, optionArray); +} + +/** + * Generates a snapshot of the current V8 heap + * and returns a Readable Stream. + * @param {{ + * exposeInternals?: boolean, + * exposeNumericValues?: boolean + * }} [options] + * @returns {import('./stream.js').Readable} + */ +function getHeapSnapshot(options) { + const optionArray = getHeapSnapshotOptions(options); + const handle = createHeapSnapshotStream(optionArray); + assert(handle); + return new HeapSnapshotStream(handle); +} + +// We need to get the buffer from the binding at the callsite since +// it's re-initialized after deserialization. 
+const binding = internalBinding('v8'); + +const { + cachedDataVersionTag, + setFlagsFromString: _setFlagsFromString, + isStringOneByteRepresentation: _isStringOneByteRepresentation, + updateHeapStatisticsBuffer, + updateHeapSpaceStatisticsBuffer, + updateHeapCodeStatisticsBuffer, + setHeapSnapshotNearHeapLimit: _setHeapSnapshotNearHeapLimit, + + // Properties for heap statistics buffer extraction. + kTotalHeapSizeIndex, + kTotalHeapSizeExecutableIndex, + kTotalPhysicalSizeIndex, + kTotalAvailableSize, + kUsedHeapSizeIndex, + kHeapSizeLimitIndex, + kDoesZapGarbageIndex, + kMallocedMemoryIndex, + kPeakMallocedMemoryIndex, + kNumberOfNativeContextsIndex, + kNumberOfDetachedContextsIndex, + kTotalGlobalHandlesSizeIndex, + kUsedGlobalHandlesSizeIndex, + kExternalMemoryIndex, + + // Properties for heap spaces statistics buffer extraction. + kHeapSpaces, + kSpaceSizeIndex, + kSpaceUsedSizeIndex, + kSpaceAvailableSizeIndex, + kPhysicalSpaceSizeIndex, + + // Properties for heap code statistics buffer extraction. + kCodeAndMetadataSizeIndex, + kBytecodeAndMetadataSizeIndex, + kExternalScriptSourceSizeIndex, + kCPUProfilerMetaDataSizeIndex, + + heapStatisticsBuffer, + heapCodeStatisticsBuffer, + heapSpaceStatisticsBuffer, + getCppHeapStatistics: _getCppHeapStatistics, + detailLevel, +} = binding; + +const kNumberOfHeapSpaces = kHeapSpaces.length; + +/** + * Sets V8 command-line flags. + * @param {string} flags + * @returns {void} + */ +function setFlagsFromString(flags) { + validateString(flags, 'flags'); + _setFlagsFromString(flags); +} + +/** + * Return whether this string uses one byte as underlying representation or not. + * @param {string} content + * @returns {boolean} + */ +function isStringOneByteRepresentation(content) { + validateString(content, 'content'); + return _isStringOneByteRepresentation(content); +} + + +/** + * Gets the current V8 heap statistics. 
+ * @returns {{ + * total_heap_size: number; + * total_heap_size_executable: number; + * total_physical_size: number; + * total_available_size: number; + * used_heap_size: number; + * heap_size_limit: number; + * malloced_memory: number; + * peak_malloced_memory: number; + * does_zap_garbage: number; + * number_of_native_contexts: number; + * number_of_detached_contexts: number; + * }} + */ +function getHeapStatistics() { + const buffer = heapStatisticsBuffer; + + updateHeapStatisticsBuffer(); + + return { + total_heap_size: buffer[kTotalHeapSizeIndex], + total_heap_size_executable: buffer[kTotalHeapSizeExecutableIndex], + total_physical_size: buffer[kTotalPhysicalSizeIndex], + total_available_size: buffer[kTotalAvailableSize], + used_heap_size: buffer[kUsedHeapSizeIndex], + heap_size_limit: buffer[kHeapSizeLimitIndex], + malloced_memory: buffer[kMallocedMemoryIndex], + peak_malloced_memory: buffer[kPeakMallocedMemoryIndex], + does_zap_garbage: buffer[kDoesZapGarbageIndex], + number_of_native_contexts: buffer[kNumberOfNativeContextsIndex], + number_of_detached_contexts: buffer[kNumberOfDetachedContextsIndex], + total_global_handles_size: buffer[kTotalGlobalHandlesSizeIndex], + used_global_handles_size: buffer[kUsedGlobalHandlesSizeIndex], + external_memory: buffer[kExternalMemoryIndex], + }; +} + +/** + * Gets the current V8 heap space statistics. 
+ * @returns {{ + * space_name: string; + * space_size: number; + * space_used_size: number; + * space_available_size: number; + * physical_space_size: number; + * }[]} + */ +function getHeapSpaceStatistics() { + const heapSpaceStatistics = new Array(kNumberOfHeapSpaces); + const buffer = heapSpaceStatisticsBuffer; + + for (let i = 0; i < kNumberOfHeapSpaces; i++) { + updateHeapSpaceStatisticsBuffer(i); + heapSpaceStatistics[i] = { + space_name: kHeapSpaces[i], + space_size: buffer[kSpaceSizeIndex], + space_used_size: buffer[kSpaceUsedSizeIndex], + space_available_size: buffer[kSpaceAvailableSizeIndex], + physical_space_size: buffer[kPhysicalSpaceSizeIndex], + }; + } + + return heapSpaceStatistics; +} + +/** + * Gets the current V8 heap code statistics. + * @returns {{ + * code_and_metadata_size: number; + * bytecode_and_metadata_size: number; + * external_script_source_size: number; + * cpu_profiler_metadata_size: number; + * }} + */ +function getHeapCodeStatistics() { + const buffer = heapCodeStatisticsBuffer; + + updateHeapCodeStatisticsBuffer(); + return { + code_and_metadata_size: buffer[kCodeAndMetadataSizeIndex], + bytecode_and_metadata_size: buffer[kBytecodeAndMetadataSizeIndex], + external_script_source_size: buffer[kExternalScriptSourceSizeIndex], + cpu_profiler_metadata_size: buffer[kCPUProfilerMetaDataSizeIndex], + }; +} + +let heapSnapshotNearHeapLimitCallbackAdded = false; +function setHeapSnapshotNearHeapLimit(limit) { + validateUint32(limit, 'limit', true); + if (heapSnapshotNearHeapLimitCallbackAdded || + getOptionValue('--heapsnapshot-near-heap-limit') > 0 + ) { + return; + } + heapSnapshotNearHeapLimitCallbackAdded = true; + _setHeapSnapshotNearHeapLimit(limit); +} + +const detailLevelDict = { + __proto__: null, + detailed: detailLevel.DETAILED, + brief: detailLevel.BRIEF, +}; + +function getCppHeapStatistics(type = 'detailed') { + validateOneOf(type, 'type', ['brief', 'detailed']); + const result = _getCppHeapStatistics(detailLevelDict[type]); + 
result.detail_level = type; + return result; +} + +/* V8 serialization API */ + +/* JS methods for the base objects */ +Serializer.prototype._getDataCloneError = Error; + +/** + * Reads raw bytes from the deserializer's internal buffer. + * @param {number} length + * @returns {Buffer} + */ +Deserializer.prototype.readRawBytes = function readRawBytes(length) { + const offset = this._readRawBytes(length); + // `this.buffer` can be a Buffer or a plain Uint8Array, so just calling + // `.slice()` doesn't work. + return new FastBuffer(this.buffer.buffer, + this.buffer.byteOffset + offset, + length); +}; + +function arrayBufferViewTypeToIndex(abView) { + const type = ObjectPrototypeToString(abView); + if (type === '[object Int8Array]') return 0; + if (type === '[object Uint8Array]') return 1; + if (type === '[object Uint8ClampedArray]') return 2; + if (type === '[object Int16Array]') return 3; + if (type === '[object Uint16Array]') return 4; + if (type === '[object Int32Array]') return 5; + if (type === '[object Uint32Array]') return 6; + if (type === '[object Float32Array]') return 7; + if (type === '[object Float64Array]') return 8; + if (type === '[object DataView]') return 9; + // Index 10 is FastBuffer. 
+ if (type === '[object BigInt64Array]') return 11; + if (type === '[object BigUint64Array]') return 12; + if (type === '[object Float16Array]') return 13; + return -1; +} + +function arrayBufferViewIndexToType(index) { + if (index === 0) return Int8Array; + if (index === 1) return Uint8Array; + if (index === 2) return Uint8ClampedArray; + if (index === 3) return Int16Array; + if (index === 4) return Uint16Array; + if (index === 5) return Int32Array; + if (index === 6) return Uint32Array; + if (index === 7) return Float32Array; + if (index === 8) return Float64Array; + if (index === 9) return DataView; + if (index === 10) return FastBuffer; + if (index === 11) return BigInt64Array; + if (index === 12) return BigUint64Array; + if (index === 13) return Float16Array; + return undefined; +} + +class DefaultSerializer extends Serializer { + constructor() { + super(); + + this._setTreatArrayBufferViewsAsHostObjects(true); + } + + /** + * Used to write some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @param {object} abView + * @returns {void} + */ + _writeHostObject(abView) { + // Keep track of how to handle different ArrayBufferViews. The default + // Serializer for Node does not use the V8 methods for serializing those + // objects because Node's `Buffer` objects use pooled allocation in many + // cases, and their underlying `ArrayBuffer`s would show up in the + // serialization. Because a) those may contain sensitive data and the user + // may not be aware of that and b) they are often much larger than the + // `Buffer` itself, custom serialization is applied. 
+ let i = 10; // FastBuffer + if (abView.constructor !== Buffer) { + i = arrayBufferViewTypeToIndex(abView); + if (i === -1) { + throw new this._getDataCloneError( + `Unserializable host object: ${inspect(abView)}`); + } + } + this.writeUint32(i); + this.writeUint32(abView.byteLength); + this.writeRawBytes(new Uint8Array(abView.buffer, + abView.byteOffset, + abView.byteLength)); + } +} + +class DefaultDeserializer extends Deserializer { + /** + * Used to read some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @returns {any} + */ + _readHostObject() { + const typeIndex = this.readUint32(); + const ctor = arrayBufferViewIndexToType(typeIndex); + const byteLength = this.readUint32(); + const byteOffset = this._readRawBytes(byteLength); + const BYTES_PER_ELEMENT = ctor.BYTES_PER_ELEMENT || 1; + + const offset = this.buffer.byteOffset + byteOffset; + if (offset % BYTES_PER_ELEMENT === 0) { + return new ctor(this.buffer.buffer, + offset, + byteLength / BYTES_PER_ELEMENT); + } + // Copy to an aligned buffer first. + const buffer_copy = Buffer.allocUnsafe(byteLength); + buffer_copy.set(new Uint8Array(this.buffer.buffer, this.buffer.byteOffset + byteOffset, byteLength)); + return new ctor(buffer_copy.buffer, + buffer_copy.byteOffset, + byteLength / BYTES_PER_ELEMENT); + } +} + +/** + * Uses a `DefaultSerializer` to serialize `value` + * into a buffer. + * @param {any} value + * @returns {Buffer} + */ +function serialize(value) { + const ser = new DefaultSerializer(); + ser.writeHeader(); + ser.writeValue(value); + return ser.releaseBuffer(); +} + +/** + * Uses a `DefaultDeserializer` with default options + * to read a JavaScript value from a buffer. 
+ * @param {Buffer | TypedArray | DataView} buffer + * @returns {any} + */ +function deserialize(buffer) { + const der = new DefaultDeserializer(buffer); + der.readHeader(); + return der.readValue(); +} + +class GCProfiler { + #profiler = null; + + start() { + if (!this.#profiler) { + this.#profiler = new binding.GCProfiler(); + this.#profiler.start(); + } + } + + stop() { + if (this.#profiler) { + const data = this.#profiler.stop(); + this.#profiler = null; + return JSONParse(data); + } + } +} + +module.exports = { + cachedDataVersionTag, + getHeapSnapshot, + getHeapStatistics, + getHeapSpaceStatistics, + getHeapCodeStatistics, + getCppHeapStatistics, + setFlagsFromString, + Serializer, + Deserializer, + DefaultSerializer, + DefaultDeserializer, + deserialize, + takeCoverage: profiler.takeCoverage, + stopCoverage: profiler.stopCoverage, + serialize, + writeHeapSnapshot, + promiseHooks, + queryObjects, + startupSnapshot, + setHeapSnapshotNearHeapLimit, + GCProfiler, + isStringOneByteRepresentation, +}; \ No newline at end of file diff --git a/node/vm.js b/node/vm.js new file mode 100644 index 00000000..695e39ad --- /dev/null +++ b/node/vm.js @@ -0,0 +1,424 @@ +"use strict"; + +const { + ArrayPrototypeForEach, + ObjectFreeze, + PromiseReject, + ReflectApply, + Symbol, +} = primordials; + +const { + ContextifyScript, + makeContext, + constants, + measureMemory: _measureMemory, +} = internalBinding("contextify"); +const { ERR_CONTEXT_NOT_INITIALIZED, ERR_INVALID_ARG_TYPE } = + require("internal/errors").codes; +const { + validateArray, + validateBoolean, + validateBuffer, + validateInt32, + validateOneOf, + validateObject, + validateString, + validateStringArray, + validateUint32, + kValidateObjectAllowArray, + kValidateObjectAllowNullable, +} = require("internal/validators"); +const { + emitExperimentalWarning, + kEmptyObject, + kVmBreakFirstLineSymbol, +} = require("internal/util"); +const { + getHostDefinedOptionId, + internalCompileFunction, + isContext: 
_isContext, + registerImportModuleDynamically, +} = require("internal/vm"); +const { vm_dynamic_import_main_context_default, vm_context_no_contextify } = + internalBinding("symbols"); +const kParsingContext = Symbol("script parsing context"); + +/** + * Check if object is a context object created by vm.createContext(). + * @throws {TypeError} If object is not an object in the first place, throws TypeError. + * @param {object} object Object to check. + * @returns {boolean} + */ +function isContext(object) { + validateObject(object, "object", kValidateObjectAllowArray); + + return _isContext(object); +} + +class Script extends ContextifyScript { + constructor(code, options = kEmptyObject) { + code = `${code}`; + if (typeof options === "string") { + options = { filename: options }; + } else { + validateObject(options, "options"); + } + + const { + filename = "evalmachine.", + lineOffset = 0, + columnOffset = 0, + cachedData, + produceCachedData = false, + importModuleDynamically, + [kParsingContext]: parsingContext, + } = options; + + validateString(filename, "options.filename"); + validateInt32(lineOffset, "options.lineOffset"); + validateInt32(columnOffset, "options.columnOffset"); + if (cachedData !== undefined) { + validateBuffer(cachedData, "options.cachedData"); + } + validateBoolean(produceCachedData, "options.produceCachedData"); + + const hostDefinedOptionId = getHostDefinedOptionId( + importModuleDynamically, + filename + ); + // Calling `ReThrow()` on a native TryCatch does not generate a new + // abort-on-uncaught-exception check. A dummy try/catch in JS land + // protects against that. 
/**
 * Assert that `contextifiedObject` was produced by vm.createContext().
 * @throws {TypeError} ERR_INVALID_ARG_TYPE when it is not a vm.Context
 *   (isContext itself throws first when the value is not an object at all).
 */
function validateContext(contextifiedObject) {
  if (isContext(contextifiedObject)) {
    return;
  }
  throw new ERR_INVALID_ARG_TYPE(
    "contextifiedObject",
    "vm.Context",
    contextifiedObject
  );
}
/**
 * Extract and validate the context-creation subset of a runIn*Context()
 * options bag (`contextName`, `contextOrigin`, `contextCodeGeneration`,
 * `microtaskMode`) into the shape expected by createContext().
 * Returns an empty object when no options were given.
 */
function getContextOptions(options) {
  if (!options) return {};

  const { contextName, contextOrigin, microtaskMode, contextCodeGeneration } =
    options;

  if (contextName !== undefined)
    validateString(contextName, "options.contextName");
  if (contextOrigin !== undefined)
    validateString(contextOrigin, "options.contextOrigin");

  let codeGeneration;
  if (contextCodeGeneration !== undefined) {
    validateObject(contextCodeGeneration, "options.contextCodeGeneration");
    const { strings, wasm } = contextCodeGeneration;
    if (strings !== undefined)
      validateBoolean(strings, "options.contextCodeGeneration.strings");
    if (wasm !== undefined)
      validateBoolean(wasm, "options.contextCodeGeneration.wasm");
    codeGeneration = { strings, wasm };
  }

  if (microtaskMode !== undefined)
    validateString(microtaskMode, "options.microtaskMode");

  return {
    name: contextName,
    origin: contextOrigin,
    codeGeneration,
    microtaskMode,
  };
}
/**
 * Legacy factory form of `new Script(code, options)`; kept for
 * backwards compatibility with callers that prefer a function.
 */
function createScript(code, options) {
  const script = new Script(code, options);
  return script;
}
/**
 * Compile `code` and run it inside an existing contextified object.
 * A string `options` argument is shorthand for `{ filename }`.
 */
function runInContext(code, contextifiedObject, options) {
  validateContext(contextifiedObject);
  const opts =
    typeof options === "string"
      ? { filename: options, [kParsingContext]: contextifiedObject }
      : { ...options, [kParsingContext]: contextifiedObject };
  return createScript(code, opts).runInContext(contextifiedObject, opts);
}

/**
 * Compile `code`, contextify `contextObject` (unless it already is a
 * context), and run the script inside it.
 */
function runInNewContext(code, contextObject, options) {
  const normalized =
    typeof options === "string" ? { filename: options } : options;
  const context = createContext(contextObject, getContextOptions(normalized));
  const opts = { ...normalized, [kParsingContext]: context };
  return createScript(code, opts).runInNewContext(context, opts);
}

/**
 * Compile `code` and run it in the current (main) context.
 */
function runInThisContext(code, options) {
  const opts = typeof options === "string" ? { filename: options } : options;
  return createScript(code, opts).runInThisContext(opts);
}
/**
 * Experimental: ask V8 for a memory measurement of the current isolate.
 * @param {{ mode?: 'summary'|'detailed', execution?: 'default'|'eager' }} [options]
 * @returns {Promise<object>} Resolves with the measurement; rejects with
 *   ERR_CONTEXT_NOT_INITIALIZED when the native call yields `undefined`.
 */
function measureMemory(options = kEmptyObject) {
  emitExperimentalWarning("vm.measureMemory");
  validateObject(options, "options");

  const { mode = "summary", execution = "default" } = options;
  validateOneOf(mode, "options.mode", ["summary", "detailed"]);
  validateOneOf(execution, "options.execution", ["default", "eager"]);

  const nativeMode = measureMemoryModes[mode];
  const nativeExecution = measureMemoryExecutions[execution];
  const result = _measureMemory(nativeMode, nativeExecution);

  // The native binding yields `undefined` when the context is not
  // initialized; surface that as a rejected promise rather than throwing.
  return result === undefined
    ? PromiseReject(new ERR_CONTEXT_NOT_INITIALIZED())
    : result;
}
--experimental-vm-modules is on. diff --git a/node/wasi.js b/node/wasi.js new file mode 100644 index 00000000..71dbc60a --- /dev/null +++ b/node/wasi.js @@ -0,0 +1,176 @@ +'use strict'; +const { + ArrayPrototypeForEach, + ArrayPrototypeMap, + ArrayPrototypePush, + FunctionPrototypeBind, + ObjectEntries, + String, + Symbol, +} = primordials; + +const { + ERR_INVALID_ARG_VALUE, + ERR_WASI_ALREADY_STARTED, +} = require('internal/errors').codes; +const { + emitExperimentalWarning, + kEmptyObject, +} = require('internal/util'); +const { + validateArray, + validateBoolean, + validateFunction, + validateInt32, + validateObject, + validateString, + validateUndefined, +} = require('internal/validators'); +const kExitCode = Symbol('kExitCode'); +const kSetMemory = Symbol('kSetMemory'); +const kStarted = Symbol('kStarted'); +const kInstance = Symbol('kInstance'); +const kBindingName = Symbol('kBindingName'); + +emitExperimentalWarning('WASI'); + +class WASI { + constructor(options = kEmptyObject) { + validateObject(options, 'options'); + + let _WASI; + validateString(options.version, 'options.version'); + switch (options.version) { + case 'unstable': + ({ WASI: _WASI } = internalBinding('wasi')); + this[kBindingName] = 'wasi_unstable'; + break; + case 'preview1': + ({ WASI: _WASI } = internalBinding('wasi')); + this[kBindingName] = 'wasi_snapshot_preview1'; + break; + // When adding support for additional wasi versions add case here + default: + throw new ERR_INVALID_ARG_VALUE('options.version', + options.version, + 'unsupported WASI version'); + } + + if (options.args !== undefined) + validateArray(options.args, 'options.args'); + const args = ArrayPrototypeMap(options.args || [], String); + + const env = []; + if (options.env !== undefined) { + validateObject(options.env, 'options.env'); + ArrayPrototypeForEach( + ObjectEntries(options.env), + ({ 0: key, 1: value }) => { + if (value !== undefined) + ArrayPrototypePush(env, `${key}=${value}`); + }); + } + + const preopens = 
[]; + if (options.preopens !== undefined) { + validateObject(options.preopens, 'options.preopens'); + ArrayPrototypeForEach( + ObjectEntries(options.preopens), + ({ 0: key, 1: value }) => + ArrayPrototypePush(preopens, String(key), String(value)), + ); + } + + const { stdin = 0, stdout = 1, stderr = 2 } = options; + validateInt32(stdin, 'options.stdin', 0); + validateInt32(stdout, 'options.stdout', 0); + validateInt32(stderr, 'options.stderr', 0); + const stdio = [stdin, stdout, stderr]; + + const wrap = new _WASI(args, env, preopens, stdio); + + for (const prop in wrap) { + wrap[prop] = FunctionPrototypeBind(wrap[prop], wrap); + } + + let returnOnExit = true; + if (options.returnOnExit !== undefined) { + validateBoolean(options.returnOnExit, 'options.returnOnExit'); + returnOnExit = options.returnOnExit; + } + if (returnOnExit) + wrap.proc_exit = FunctionPrototypeBind(wasiReturnOnProcExit, this); + + this[kSetMemory] = wrap._setMemory; + delete wrap._setMemory; + this.wasiImport = wrap; + this[kStarted] = false; + this[kExitCode] = 0; + this[kInstance] = undefined; + } + + finalizeBindings(instance, { + memory = instance?.exports?.memory, + } = {}) { + if (this[kStarted]) { + throw new ERR_WASI_ALREADY_STARTED(); + } + + validateObject(instance, 'instance'); + validateObject(instance.exports, 'instance.exports'); + + this[kSetMemory](memory); + + this[kInstance] = instance; + this[kStarted] = true; + } + + // Must not export _initialize, must export _start + start(instance) { + this.finalizeBindings(instance); + + const { _start, _initialize } = this[kInstance].exports; + + validateFunction(_start, 'instance.exports._start'); + validateUndefined(_initialize, 'instance.exports._initialize'); + + try { + _start(); + } catch (err) { + if (err !== kExitCode) { + throw err; + } + } + + return this[kExitCode]; + } + + // Must not export _start, may optionally export _initialize + initialize(instance) { + this.finalizeBindings(instance); + + const { _start, _initialize } 
'use strict';

// Driver script for the WorkerPool example.
// BUG FIX: the original line was `require('./worker_pool.js')` — i.e. this
// very file requiring itself. A circular self-require resolves to the
// still-empty `module.exports` object, so `new WorkerPool(...)` throws a
// TypeError at load time. The pool implementation must live in a separate
// module; NOTE(review): confirm the implementation module's actual filename
// against the repository layout.
const WorkerPool = require('./worker_pool_impl.js');
const os = require('node:os');

// Total tasks to submit; the pool is closed after the last one reports back.
const TASK_COUNT = 10;

const pool = new WorkerPool(os.availableParallelism());

let finished = 0;
for (let i = 0; i < TASK_COUNT; i++) {
  pool.runTask({ a: 42, b: 100 }, (err, result) => {
    console.log(i, err, result);
    // Close the pool only once every queued task has completed.
    if (++finished === TASK_COUNT)
      pool.close();
  });
}
+module.exports = { + isInternalThread, + isMainThread, + MessagePort, + MessageChannel, + markAsUncloneable, + markAsUntransferable, + isMarkedAsUntransferable, + moveMessagePortToContext, + receiveMessageOnPort, + resourceLimits, + postMessageToThread, + threadId, + threadName, + SHARE_ENV, + Worker, + parentPort: null, + workerData: null, + BroadcastChannel, + setEnvironmentData, + getEnvironmentData, + locks, +}; diff --git a/node/zlib.js b/node/zlib.js new file mode 100644 index 00000000..c56051f9 --- /dev/null +++ b/node/zlib.js @@ -0,0 +1,1104 @@ +"use strict"; + +const { + ArrayBuffer, + MathMax, + NumberIsNaN, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectEntries, + ObjectFreeze, + ObjectKeys, + ObjectSetPrototypeOf, + ReflectApply, + Symbol, + Uint32Array, +} = primordials; + +const { + codes: { + ERR_BROTLI_INVALID_PARAM, + ERR_BUFFER_TOO_LARGE, + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + ERR_TRAILING_JUNK_AFTER_STREAM_END, + ERR_ZSTD_INVALID_PARAM, + }, + genericNodeError, +} = require("internal/errors"); +const { Transform, finished } = require("stream"); +const { deprecateInstantiation } = require("internal/util"); +const { + isArrayBufferView, + isAnyArrayBuffer, + isUint8Array, +} = require("internal/util/types"); +const binding = internalBinding("zlib"); +const { crc32: crc32Native } = binding; +const assert = require("internal/assert"); +const { Buffer, kMaxLength } = require("buffer"); +const { owner_symbol } = require("internal/async_hooks").symbols; +const { + checkRangesOrGetDefault, + validateFunction, + validateUint32, + validateFiniteNumber, +} = require("internal/validators"); + +const kFlushFlag = Symbol("kFlushFlag"); +const kError = Symbol("kError"); + +const constants = internalBinding("constants").zlib; +const { + // Zlib flush levels + Z_NO_FLUSH, + Z_BLOCK, + Z_PARTIAL_FLUSH, + Z_SYNC_FLUSH, + Z_FULL_FLUSH, + Z_FINISH, + // Zlib option values + Z_MIN_CHUNK, + Z_MIN_WINDOWBITS, + Z_MAX_WINDOWBITS, + Z_MIN_LEVEL, + 
Z_MAX_LEVEL, + Z_MIN_MEMLEVEL, + Z_MAX_MEMLEVEL, + Z_DEFAULT_CHUNK, + Z_DEFAULT_COMPRESSION, + Z_DEFAULT_STRATEGY, + Z_DEFAULT_WINDOWBITS, + Z_DEFAULT_MEMLEVEL, + Z_FIXED, + // Node's compression stream modes (node_zlib_mode) + DEFLATE, + DEFLATERAW, + INFLATE, + INFLATERAW, + GZIP, + GUNZIP, + UNZIP, + BROTLI_DECODE, + BROTLI_ENCODE, + ZSTD_COMPRESS, + ZSTD_DECOMPRESS, + // Brotli operations (~flush levels) + BROTLI_OPERATION_PROCESS, + BROTLI_OPERATION_FLUSH, + BROTLI_OPERATION_FINISH, + BROTLI_OPERATION_EMIT_METADATA, + // Zstd end directives (~flush levels) + ZSTD_e_continue, + ZSTD_e_flush, + ZSTD_e_end, +} = constants; + +// Translation table for return codes. +const codes = { + Z_OK: constants.Z_OK, + Z_STREAM_END: constants.Z_STREAM_END, + Z_NEED_DICT: constants.Z_NEED_DICT, + Z_ERRNO: constants.Z_ERRNO, + Z_STREAM_ERROR: constants.Z_STREAM_ERROR, + Z_DATA_ERROR: constants.Z_DATA_ERROR, + Z_MEM_ERROR: constants.Z_MEM_ERROR, + Z_BUF_ERROR: constants.Z_BUF_ERROR, + Z_VERSION_ERROR: constants.Z_VERSION_ERROR, +}; + +for (const ckey of ObjectKeys(codes)) { + codes[codes[ckey]] = ckey; +} + +function zlibBuffer(engine, buffer, callback) { + validateFunction(callback, "callback"); + // Streams do not support non-Uint8Array ArrayBufferViews yet. Convert it to a + // Buffer without copying. 
/**
 * Synchronous one-shot (de)compression: coerce `buffer` to a Buffer-like
 * view, push it through `engine` in a single pass with the engine's
 * finish-flush flag, and return the output (or `{ buffer, engine }` when
 * the `info` option was set on the engine).
 * @throws {TypeError} ERR_INVALID_ARG_TYPE for unsupported input types.
 */
function zlibBufferSync(engine, buffer) {
  if (typeof buffer === "string") {
    buffer = Buffer.from(buffer);
  } else if (!isArrayBufferView(buffer)) {
    if (!isAnyArrayBuffer(buffer)) {
      throw new ERR_INVALID_ARG_TYPE(
        "buffer",
        ["string", "Buffer", "TypedArray", "DataView", "ArrayBuffer"],
        buffer
      );
    }
    buffer = Buffer.from(buffer);
  }

  const out = processChunkSync(engine, buffer, engine._finishFlushFlag);
  return engine._info ? { buffer: out, engine } : out;
}
/**
 * The base class for all Zlib-style streams.
 *
 * Wires a native compression handle into a stream Transform: validates the
 * chunk-size / flush-flag options against the mode family's legal range,
 * then attaches the handle and output-buffer bookkeeping to `this`.
 *
 * @param {object|undefined} opts User options (chunkSize, flush, finishFlush,
 *   maxOutputLength, info, rejectGarbageAfterEnd, plus Transform options).
 * @param {number} mode One of the node_zlib_mode constants (DEFLATE ..
 *   ZSTD_DECOMPRESS) — internal-only, asserted below.
 * @param {object} handle The native binding object (binding.Zlib etc.).
 * @param {{flush: number, finishFlush: number, fullFlush: number}} defaults
 *   Mode-family default flush flags.
 * @class
 */
function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) {
  let chunkSize = Z_DEFAULT_CHUNK;
  let maxOutputLength = kMaxLength;
  // The ZlibBase class is not exported to user land, the mode should only be
  // passed in by us.
  assert(typeof mode === "number");
  assert(mode >= DEFLATE && mode <= ZSTD_DECOMPRESS);

  // Pick which [min, max] pair of legal flush flags applies: brotli and zstd
  // use their own operation constants instead of zlib's Z_* flush levels.
  let flushBoundIdx;
  if (mode === BROTLI_ENCODE || mode === BROTLI_DECODE) {
    flushBoundIdx = FLUSH_BOUND_IDX_BROTLI;
  } else if (mode === ZSTD_COMPRESS || mode === ZSTD_DECOMPRESS) {
    flushBoundIdx = FLUSH_BOUND_IDX_ZSTD;
  } else {
    flushBoundIdx = FLUSH_BOUND_IDX_NORMAL;
  }

  if (opts) {
    // Non-finite/missing chunkSize silently falls back to the default;
    // only a finite value below Z_MIN_CHUNK is an error.
    chunkSize = opts.chunkSize;
    if (!validateFiniteNumber(chunkSize, "options.chunkSize")) {
      chunkSize = Z_DEFAULT_CHUNK;
    } else if (chunkSize < Z_MIN_CHUNK) {
      throw new ERR_OUT_OF_RANGE(
        "options.chunkSize",
        `>= ${Z_MIN_CHUNK}`,
        chunkSize
      );
    }

    flush = checkRangesOrGetDefault(
      opts.flush,
      "options.flush",
      FLUSH_BOUND[flushBoundIdx][0],
      FLUSH_BOUND[flushBoundIdx][1],
      flush
    );

    finishFlush = checkRangesOrGetDefault(
      opts.finishFlush,
      "options.finishFlush",
      FLUSH_BOUND[flushBoundIdx][0],
      FLUSH_BOUND[flushBoundIdx][1],
      finishFlush
    );

    maxOutputLength = checkRangesOrGetDefault(
      opts.maxOutputLength,
      "options.maxOutputLength",
      1,
      kMaxLength,
      kMaxLength
    );

    // Zlib streams are strictly byte streams: force off any encoding /
    // object-mode settings before handing the options to Transform.
    // Copy first so the caller's options object is not mutated.
    if (opts.encoding || opts.objectMode || opts.writableObjectMode) {
      opts = { ...opts };
      opts.encoding = null;
      opts.objectMode = false;
      opts.writableObjectMode = false;
    }
  }

  ReflectApply(Transform, this, [{ autoDestroy: true, ...opts }]);
  this[kError] = null;
  this.bytesWritten = 0;
  this._handle = handle;
  // Back-pointer so native callbacks (processCallback/zlibOnError) can find
  // the JS stream from the handle.
  handle[owner_symbol] = this;
  // Used by processCallback() and zlibOnError()
  handle.onerror = zlibOnError;
  this._outBuffer = Buffer.allocUnsafe(chunkSize);
  this._outOffset = 0;

  this._chunkSize = chunkSize;
  this._defaultFlushFlag = flush;
  this._finishFlushFlag = finishFlush;
  this._defaultFullFlushFlag = fullFlush;
  this._info = opts?.info;
  this._maxOutputLength = maxOutputLength;

  // Opt-in strict mode: error out on trailing bytes after the compressed
  // stream end instead of ignoring them.
  this._rejectGarbageAfterEnd = opts?.rejectGarbageAfterEnd === true;
}
/**
 * Schedule a flush of the given `kind` (defaults to the stream's
 * full-flush flag). Depending on the stream's write state the callback is
 * invoked immediately, deferred to the 'end' event, or attached to a
 * zero-length marker chunk that carries the flush flag through the
 * write pipeline (see kFlushBuffers).
 */
ZlibBase.prototype.flush = function (kind, callback) {
  if (typeof kind === "function" || (kind === undefined && !callback)) {
    // flush([callback]) form: shift arguments, use the default full flush.
    callback = kind;
    kind = this._defaultFullFlushFlag;
  }

  if (this.writableFinished) {
    // Nothing left to flush; just report back asynchronously.
    if (callback) {
      process.nextTick(callback);
    }
    return;
  }

  if (this.writableEnded) {
    // end() was called but data is still draining; notify at 'end'.
    if (callback) {
      this.once("end", callback);
    }
    return;
  }

  // Inject the zero-length marker chunk for this flush flag.
  this.write(kFlushBuffers[kind], "", callback);
};
/**
 * Synchronously run one input chunk through the native handle, looping until
 * the handle reports that all input was consumed and all output emitted.
 * Collects the output pieces and returns them as a single Buffer.
 *
 * State protocol with the native side: after each writeSync(), the shared
 * `self._writeState` Uint32Array holds [availOut, availIn] — the bytes of
 * output space and input still remaining.
 *
 * @param {ZlibBase} self The stream owning the native handle.
 * @param {Buffer} chunk Input bytes.
 * @param {number} flushFlag Flush flag for this (final) write.
 * @returns {Buffer} The concatenated output.
 * @throws Re-throws any error the handle reported; ERR_BUFFER_TOO_LARGE when
 *   output exceeds `_maxOutputLength`.
 */
function processChunkSync(self, chunk, flushFlag) {
  let availInBefore = chunk.byteLength;
  let availOutBefore = self._chunkSize - self._outOffset;
  let inOff = 0;
  let availOutAfter;
  let availInAfter;

  const buffers = [];
  let nread = 0;
  let inputRead = 0;
  const state = self._writeState;
  const handle = self._handle;
  let buffer = self._outBuffer;
  let offset = self._outOffset;
  const chunkSize = self._chunkSize;

  // Errors are reported through the 'error' event (via zlibOnError); capture
  // them here so they can be re-thrown synchronously.
  let error;
  self.on("error", function onError(er) {
    error = er;
  });

  while (true) {
    handle.writeSync(
      flushFlag,
      chunk, // in
      inOff, // in_off
      availInBefore, // in_len
      buffer, // out
      offset, // out_off
      availOutBefore
    ); // out_len
    if (error) throw error;
    else if (self[kError]) throw self[kError];

    availOutAfter = state[0];
    availInAfter = state[1];

    const inDelta = availInBefore - availInAfter;
    inputRead += inDelta;

    const have = availOutBefore - availOutAfter;
    if (have > 0) {
      const out = buffer.slice(offset, offset + have);
      offset += have;
      buffers.push(out);
      nread += out.byteLength;

      // Enforce the user-configurable output cap.
      if (nread > self._maxOutputLength) {
        _close(self);
        throw new ERR_BUFFER_TOO_LARGE(self._maxOutputLength);
      }
    } else {
      assert(have === 0, "have should not go down");
    }

    // Exhausted the output buffer, or used all the input create a new one.
    if (availOutAfter === 0 || offset >= chunkSize) {
      availOutBefore = chunkSize;
      offset = 0;
      buffer = Buffer.allocUnsafe(chunkSize);
    }

    if (availOutAfter === 0) {
      // Not actually done. Need to reprocess.
      // Also, update the availInBefore to the availInAfter value,
      // so that if we have to hit it a third (fourth, etc.) time,
      // it'll have the correct byte counts.
      inOff += inDelta;
      availInBefore = availInAfter;
    } else {
      break;
    }
  }

  self.bytesWritten = inputRead;
  _close(self);

  if (nread === 0) return Buffer.alloc(0);

  return buffers.length === 1 ? buffers[0] : Buffer.concat(buffers, nread);
}
+ if (availOutAfter === 0 || self._outOffset >= self._chunkSize) { + handle.availOutBefore = self._chunkSize; + self._outOffset = 0; + self._outBuffer = Buffer.allocUnsafe(self._chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. + // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) time, + // it'll have the correct byte counts. + handle.inOff += inDelta; + handle.availInBefore = availInAfter; + + if (!streamBufferIsFull) { + this.write( + handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize + ); // out_len + } else { + const oldRead = self._read; + self._read = (n) => { + self._read = oldRead; + this.write( + handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize + ); // out_len + self._read(n); + }; + } + return; + } + + if (availInAfter > 0) { + // If we have more input that should be written, but we also have output + // space available, that means that the compression library was not + // interested in receiving more data, and in particular that the input + // stream has ended early. + // This applies to streams where we don't check data past the end of + // what was consumed; that is, everything except Gunzip/Unzip. + + if (self._rejectGarbageAfterEnd) { + const err = new ERR_TRAILING_JUNK_AFTER_STREAM_END(); + self.destroy(err); + this.cb(err); + return; + } + + self.push(null); + } + + // Finished with the chunk. 
/**
 * Base class for streams actually backed by zlib (deflate/inflate/gzip
 * families), layered on ZlibBase with zlib-specific parameters.
 *
 * @param {object|undefined} opts User options (windowBits, level, memLevel,
 *   strategy, dictionary, plus everything ZlibBase accepts).
 * @param {number} mode One of DEFLATE/DEFLATERAW/INFLATE/INFLATERAW/GZIP/
 *   GUNZIP/UNZIP.
 */
function Zlib(opts, mode) {
  let windowBits = Z_DEFAULT_WINDOWBITS;
  let level = Z_DEFAULT_COMPRESSION;
  let memLevel = Z_DEFAULT_MEMLEVEL;
  let strategy = Z_DEFAULT_STRATEGY;
  let dictionary;

  if (opts) {
    // windowBits is special. On the compression side, 0 is an invalid value.
    // But on the decompression side, a value of 0 for windowBits tells zlib
    // to use the window size in the zlib header of the compressed stream.
    if (
      (opts.windowBits == null || opts.windowBits === 0) &&
      (mode === INFLATE || mode === GUNZIP || mode === UNZIP)
    ) {
      windowBits = 0;
    } else {
      // `{ windowBits: 8 }` is valid for deflate but not gzip.
      const min = Z_MIN_WINDOWBITS + (mode === GZIP ? 1 : 0);
      windowBits = checkRangesOrGetDefault(
        opts.windowBits,
        "options.windowBits",
        min,
        Z_MAX_WINDOWBITS,
        Z_DEFAULT_WINDOWBITS
      );
    }

    level = checkRangesOrGetDefault(
      opts.level,
      "options.level",
      Z_MIN_LEVEL,
      Z_MAX_LEVEL,
      Z_DEFAULT_COMPRESSION
    );

    memLevel = checkRangesOrGetDefault(
      opts.memLevel,
      "options.memLevel",
      Z_MIN_MEMLEVEL,
      Z_MAX_MEMLEVEL,
      Z_DEFAULT_MEMLEVEL
    );

    strategy = checkRangesOrGetDefault(
      opts.strategy,
      "options.strategy",
      Z_DEFAULT_STRATEGY,
      Z_FIXED,
      Z_DEFAULT_STRATEGY
    );

    // An ArrayBuffer dictionary is copied into a Buffer; any other
    // non-view value is rejected.
    dictionary = opts.dictionary;
    if (dictionary !== undefined && !isArrayBufferView(dictionary)) {
      if (isAnyArrayBuffer(dictionary)) {
        dictionary = Buffer.from(dictionary);
      } else {
        throw new ERR_INVALID_ARG_TYPE(
          "options.dictionary",
          ["Buffer", "TypedArray", "DataView", "ArrayBuffer"],
          dictionary
        );
      }
    }
  }

  const handle = new binding.Zlib(mode);
  // Ideally, we could let ZlibBase() set up _writeState. I haven't been able
  // to come up with a good solution that doesn't break our internal API,
  // and with it all supported npm versions at the time of writing.
  // _writeState is the [availOut, availIn] pair shared with the native side.
  this._writeState = new Uint32Array(2);
  handle.init(
    windowBits,
    level,
    memLevel,
    strategy,
    this._writeState,
    processCallback,
    dictionary
  );

  ReflectApply(ZlibBase, this, [opts, mode, handle, zlibDefaultOpts]);

  this._level = level;
  this._strategy = strategy;
  this._mode = mode;
}
+function paramsAfterFlushCallback(level, strategy, callback) { + assert(this._handle, "zlib binding closed"); + this._handle.params(level, strategy); + if (!this.destroyed) { + this._level = level; + this._strategy = strategy; + if (callback) callback(); + } +} + +Zlib.prototype.params = function params(level, strategy, callback) { + checkRangesOrGetDefault(level, "level", Z_MIN_LEVEL, Z_MAX_LEVEL); + checkRangesOrGetDefault(strategy, "strategy", Z_DEFAULT_STRATEGY, Z_FIXED); + + if (this._level !== level || this._strategy !== strategy) { + this.flush( + Z_SYNC_FLUSH, + paramsAfterFlushCallback.bind(this, level, strategy, callback) + ); + } else { + process.nextTick(callback); + } +}; + +// generic zlib +// minimal 2-byte header +function Deflate(opts) { + if (!(this instanceof Deflate)) { + return deprecateInstantiation(Deflate, "DEP0184", opts); + } + ReflectApply(Zlib, this, [opts, DEFLATE]); +} +ObjectSetPrototypeOf(Deflate.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Deflate, Zlib); + +function Inflate(opts) { + if (!(this instanceof Inflate)) { + return deprecateInstantiation(Inflate, "DEP0184", opts); + } + ReflectApply(Zlib, this, [opts, INFLATE]); +} +ObjectSetPrototypeOf(Inflate.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Inflate, Zlib); + +function Gzip(opts) { + if (!(this instanceof Gzip)) { + return deprecateInstantiation(Gzip, "DEP0184", opts); + } + ReflectApply(Zlib, this, [opts, GZIP]); +} +ObjectSetPrototypeOf(Gzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Gzip, Zlib); + +function Gunzip(opts) { + if (!(this instanceof Gunzip)) { + return deprecateInstantiation(Gunzip, "DEP0184", opts); + } + ReflectApply(Zlib, this, [opts, GUNZIP]); +} +ObjectSetPrototypeOf(Gunzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Gunzip, Zlib); + +function DeflateRaw(opts) { + if (opts && opts.windowBits === 8) opts.windowBits = 9; + if (!(this instanceof DeflateRaw)) { + return deprecateInstantiation(DeflateRaw, "DEP0184", opts); + } + 
ReflectApply(Zlib, this, [opts, DEFLATERAW]); +} +ObjectSetPrototypeOf(DeflateRaw.prototype, Zlib.prototype); +ObjectSetPrototypeOf(DeflateRaw, Zlib); + +function InflateRaw(opts) { + if (!(this instanceof InflateRaw)) { + return deprecateInstantiation(InflateRaw, "DEP0184", opts); + } + ReflectApply(Zlib, this, [opts, INFLATERAW]); +} +ObjectSetPrototypeOf(InflateRaw.prototype, Zlib.prototype); +ObjectSetPrototypeOf(InflateRaw, Zlib); + +function Unzip(opts) { + if (!(this instanceof Unzip)) { + return deprecateInstantiation(Unzip, "DEP0184", opts); + } + ReflectApply(Zlib, this, [opts, UNZIP]); +} +ObjectSetPrototypeOf(Unzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Unzip, Zlib); + +function createConvenienceMethod(ctor, sync) { + if (sync) { + return function syncBufferWrapper(buffer, opts) { + return zlibBufferSync(new ctor(opts), buffer); + }; + } + return function asyncBufferWrapper(buffer, opts, callback) { + if (typeof opts === "function") { + callback = opts; + opts = {}; + } + return zlibBuffer(new ctor(opts), buffer, callback); + }; +} + +const kMaxBrotliParam = MathMax( + ...ObjectEntries(constants).map(({ 0: key, 1: value }) => + key.startsWith("BROTLI_PARAM_") ? 
value : 0 + ) +); +const brotliInitParamsArray = new Uint32Array(kMaxBrotliParam + 1); + +const brotliDefaultOpts = { + flush: BROTLI_OPERATION_PROCESS, + finishFlush: BROTLI_OPERATION_FINISH, + fullFlush: BROTLI_OPERATION_FLUSH, +}; +function Brotli(opts, mode) { + assert(mode === BROTLI_DECODE || mode === BROTLI_ENCODE); + + brotliInitParamsArray.fill(-1); + if (opts?.params) { + ObjectKeys(opts.params).forEach((origKey) => { + const key = +origKey; + if ( + NumberIsNaN(key) || + key < 0 || + key > kMaxBrotliParam || + (brotliInitParamsArray[key] | 0) !== -1 + ) { + throw new ERR_BROTLI_INVALID_PARAM(origKey); + } + + const value = opts.params[origKey]; + if (typeof value !== "number" && typeof value !== "boolean") { + throw new ERR_INVALID_ARG_TYPE( + "options.params[key]", + "number", + opts.params[origKey] + ); + } + brotliInitParamsArray[key] = value; + }); + } + + const handle = + mode === BROTLI_DECODE + ? new binding.BrotliDecoder(mode) + : new binding.BrotliEncoder(mode); + + this._writeState = new Uint32Array(2); + handle.init(brotliInitParamsArray, this._writeState, processCallback); + + ReflectApply(ZlibBase, this, [opts, mode, handle, brotliDefaultOpts]); +} +ObjectSetPrototypeOf(Brotli.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Brotli, Zlib); + +function BrotliCompress(opts) { + if (!(this instanceof BrotliCompress)) { + return deprecateInstantiation(BrotliCompress, "DEP0184", opts); + } + ReflectApply(Brotli, this, [opts, BROTLI_ENCODE]); +} +ObjectSetPrototypeOf(BrotliCompress.prototype, Brotli.prototype); +ObjectSetPrototypeOf(BrotliCompress, Brotli); + +function BrotliDecompress(opts) { + if (!(this instanceof BrotliDecompress)) { + return deprecateInstantiation(BrotliDecompress, "DEP0184", opts); + } + ReflectApply(Brotli, this, [opts, BROTLI_DECODE]); +} +ObjectSetPrototypeOf(BrotliDecompress.prototype, Brotli.prototype); +ObjectSetPrototypeOf(BrotliDecompress, Brotli); + +const zstdDefaultOpts = { + flush: ZSTD_e_continue, + 
finishFlush: ZSTD_e_end, + fullFlush: ZSTD_e_flush, +}; +class Zstd extends ZlibBase { + constructor(opts, mode, initParamsArray, maxParam) { + assert(mode === ZSTD_COMPRESS || mode === ZSTD_DECOMPRESS); + + initParamsArray.fill(-1); + if (opts?.params) { + ObjectKeys(opts.params).forEach((origKey) => { + const key = +origKey; + if ( + NumberIsNaN(key) || + key < 0 || + key > maxParam || + (initParamsArray[key] | 0) !== -1 + ) { + throw new ERR_ZSTD_INVALID_PARAM(origKey); + } + + const value = opts.params[origKey]; + if (typeof value !== "number" && typeof value !== "boolean") { + throw new ERR_INVALID_ARG_TYPE( + "options.params[key]", + "number", + opts.params[origKey] + ); + } + initParamsArray[key] = value; + }); + } + + const handle = + mode === ZSTD_COMPRESS + ? new binding.ZstdCompress() + : new binding.ZstdDecompress(); + + const pledgedSrcSize = opts?.pledgedSrcSize ?? undefined; + + const writeState = new Uint32Array(2); + + handle.init( + initParamsArray, + pledgedSrcSize, + writeState, + processCallback, + opts?.dictionary && isArrayBufferView(opts.dictionary) + ? opts.dictionary + : undefined + ); + + super(opts, mode, handle, zstdDefaultOpts); + this._writeState = writeState; + } +} + +const kMaxZstdCParam = MathMax( + ...ObjectKeys(constants).map((key) => + key.startsWith("ZSTD_c_") ? constants[key] : 0 + ) +); + +const zstdInitCParamsArray = new Uint32Array(kMaxZstdCParam + 1); + +class ZstdCompress extends Zstd { + constructor(opts) { + super(opts, ZSTD_COMPRESS, zstdInitCParamsArray, kMaxZstdCParam); + } +} + +const kMaxZstdDParam = MathMax( + ...ObjectKeys(constants).map((key) => + key.startsWith("ZSTD_d_") ? 
constants[key] : 0 + ) +); + +const zstdInitDParamsArray = new Uint32Array(kMaxZstdDParam + 1); + +class ZstdDecompress extends Zstd { + constructor(opts) { + super(opts, ZSTD_DECOMPRESS, zstdInitDParamsArray, kMaxZstdDParam); + } +} + +function createProperty(ctor) { + return { + __proto__: null, + configurable: true, + enumerable: true, + value: function (options) { + return new ctor(options); + }, + }; +} + +function crc32(data, value = 0) { + if (typeof data !== "string" && !isArrayBufferView(data)) { + throw new ERR_INVALID_ARG_TYPE( + "data", + ["Buffer", "TypedArray", "DataView", "string"], + data + ); + } + validateUint32(value, "value"); + return crc32Native(data, value); +} + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(binding.Zlib.prototype, "jsref", { + __proto__: null, + get() { + return this[owner_symbol]; + }, + set(v) { + return (this[owner_symbol] = v); + }, +}); + +module.exports = { + crc32, + Deflate, + Inflate, + Gzip, + Gunzip, + DeflateRaw, + InflateRaw, + Unzip, + BrotliCompress, + BrotliDecompress, + ZstdCompress, + ZstdDecompress, + + // Convenience methods. + // compress/decompress a string or buffer in one step. 
+ deflate: createConvenienceMethod(Deflate, false), + deflateSync: createConvenienceMethod(Deflate, true), + gzip: createConvenienceMethod(Gzip, false), + gzipSync: createConvenienceMethod(Gzip, true), + deflateRaw: createConvenienceMethod(DeflateRaw, false), + deflateRawSync: createConvenienceMethod(DeflateRaw, true), + unzip: createConvenienceMethod(Unzip, false), + unzipSync: createConvenienceMethod(Unzip, true), + inflate: createConvenienceMethod(Inflate, false), + inflateSync: createConvenienceMethod(Inflate, true), + gunzip: createConvenienceMethod(Gunzip, false), + gunzipSync: createConvenienceMethod(Gunzip, true), + inflateRaw: createConvenienceMethod(InflateRaw, false), + inflateRawSync: createConvenienceMethod(InflateRaw, true), + brotliCompress: createConvenienceMethod(BrotliCompress, false), + brotliCompressSync: createConvenienceMethod(BrotliCompress, true), + brotliDecompress: createConvenienceMethod(BrotliDecompress, false), + brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true), + zstdCompress: createConvenienceMethod(ZstdCompress, false), + zstdCompressSync: createConvenienceMethod(ZstdCompress, true), + zstdDecompress: createConvenienceMethod(ZstdDecompress, false), + zstdDecompressSync: createConvenienceMethod(ZstdDecompress, true), +}; + +ObjectDefineProperties(module.exports, { + createDeflate: createProperty(Deflate), + createInflate: createProperty(Inflate), + createDeflateRaw: createProperty(DeflateRaw), + createInflateRaw: createProperty(InflateRaw), + createGzip: createProperty(Gzip), + createGunzip: createProperty(Gunzip), + createUnzip: createProperty(Unzip), + createBrotliCompress: createProperty(BrotliCompress), + createBrotliDecompress: createProperty(BrotliDecompress), + createZstdCompress: createProperty(ZstdCompress), + createZstdDecompress: createProperty(ZstdDecompress), + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + codes: { + __proto__: null, + 
enumerable: true, + writable: false, + value: ObjectFreeze(codes), + }, +}); + +// These should be considered deprecated +// expose all the zlib constants +for (const { 0: key, 1: value } of ObjectEntries(constants)) { + if (key.startsWith("BROTLI")) continue; + ObjectDefineProperty(module.exports, key, { + __proto__: null, + enumerable: false, + value, + writable: false, + }); +} diff --git a/package.json b/package.json index df3b9dc1..07b80049 100644 --- a/package.json +++ b/package.json @@ -1,27 +1,34 @@ { - "name": "movies", - "private": true, - "version": "0.0.0", - "type": "module", "scripts": { - "dev": "vite", - "build": "vite build", - "lint": "eslint .", - "preview": "vite preview" + "test": "jest", + "start": "node app.js" }, "dependencies": { - "react": "^19.0.0", - "react-dom": "^19.0.0" + "bcrypt": "^6.0.0", + "connect-sqlite3": "^0.9.16", + "cookie-parser": "^1.4.7", + "dotenv": "^17.2.3", + "express": "^5.1.0", + "express-session": "^1.18.2", + "global": "^2.0.2", + "http-errors": "^2.0.0", + "jsonwebtoken": "^9.0.2", + "localforage": "^1.10.0", + "match-sorter": "^8.1.0", + "mongoose": "^8.18.1", + "morgan": "^1.10.1", + "passport": "^0.7.0", + "passport-facebook": "^3.0.0", + "pluralize": "^8.0.0", + "python": "^0.0.4", + "react": "^19.1.1", + "react-router-dom": "^7.9.1", + "sort-by": "^0.0.2", + "state": "^0.2.0", + "zustand": "^5.0.8" }, "devDependencies": { - "@eslint/js": "^9.21.0", - "@types/react": "^19.0.10", - "@types/react-dom": "^19.0.4", - "@vitejs/plugin-react": "^4.3.4", - "eslint": "^9.21.0", - "eslint-plugin-react-hooks": "^5.1.0", - "eslint-plugin-react-refresh": "^0.4.19", - "globals": "^15.15.0", - "vite": "^6.2.0" + "jest": "^30.1.3", + "ts-node": "^10.9.2" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 00000000..a92f9bce --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,4578 @@ +lockfileVersion: '6.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +dependencies: + bcrypt: + 
specifier: ^6.0.0 + version: 6.0.0 + connect-sqlite3: + specifier: ^0.9.16 + version: 0.9.16 + cookie-parser: + specifier: ^1.4.7 + version: 1.4.7 + dotenv: + specifier: ^17.2.3 + version: 17.2.3 + express: + specifier: ^5.1.0 + version: 5.2.1 + express-session: + specifier: ^1.18.2 + version: 1.18.2 + global: + specifier: ^2.0.2 + version: 2.0.7 + http-errors: + specifier: ^2.0.0 + version: 2.0.1 + jsonwebtoken: + specifier: ^9.0.2 + version: 9.0.2 + localforage: + specifier: ^1.10.0 + version: 1.10.0 + match-sorter: + specifier: ^8.1.0 + version: 8.1.0 + mongoose: + specifier: ^8.18.1 + version: 8.19.1 + morgan: + specifier: ^1.10.1 + version: 1.10.1 + passport: + specifier: ^0.7.0 + version: 0.7.0 + passport-facebook: + specifier: ^3.0.0 + version: 3.0.0 + pluralize: + specifier: ^8.0.0 + version: 8.0.0 + python: + specifier: ^0.0.4 + version: 0.0.4 + react: + specifier: ^19.1.1 + version: 19.2.0 + react-router-dom: + specifier: ^7.9.1 + version: 7.9.3(react-dom@19.1.0)(react@19.2.0) + sort-by: + specifier: ^0.0.2 + version: 0.0.2 + state: + specifier: ^0.2.0 + version: 0.2.0 + zustand: + specifier: ^5.0.8 + version: 5.0.8(react@19.2.0) + +devDependencies: + jest: + specifier: ^30.1.3 + version: 30.2.0(@types/node@24.7.0)(ts-node@10.9.2) + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@24.7.0)(typescript@5.9.3) + +packages: + + /@ampproject/remapping@2.3.0: + resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/trace-mapping': 0.3.31 + dev: true + + /@babel/code-frame@7.27.1: + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.27.1 + js-tokens: 4.0.0 + picocolors: 1.1.1 + dev: true + + /@babel/compat-data@7.27.2: + resolution: 
{integrity: sha512-TUtMJYRPyUb/9aU8f3K0mjmjf6M9N5Woshn2CS6nqJSeJtTtQcpLUXjGt9vbF8ZGff0El99sWkLgzwW3VXnxZQ==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/core@7.27.1: + resolution: {integrity: sha512-IaaGWsQqfsQWVLqMn9OB92MNN7zukfVA4s7KKAI0KfrrDsZ0yhi5uV4baBuLuN7n3vsZpwP8asPPcVwApxvjBQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.3.0 + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.27.1 + '@babel/helper-compilation-targets': 7.27.2 + '@babel/helper-module-transforms': 7.27.1(@babel/core@7.27.1) + '@babel/helpers': 7.27.1 + '@babel/parser': 7.27.2 + '@babel/template': 7.27.2 + '@babel/traverse': 7.27.1 + '@babel/types': 7.27.1 + convert-source-map: 2.0.0 + debug: 4.4.0 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/core@7.28.4: + resolution: {integrity: sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.28.3 + '@babel/helper-compilation-targets': 7.27.2 + '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.4) + '@babel/helpers': 7.28.4 + '@babel/parser': 7.28.4 + '@babel/template': 7.27.2 + '@babel/traverse': 7.28.4 + '@babel/types': 7.28.4 + '@jridgewell/remapping': 2.3.5 + convert-source-map: 2.0.0 + debug: 4.4.0 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/generator@7.27.1: + resolution: {integrity: sha512-UnJfnIpc/+JO0/+KRVQNGU+y5taA5vCbwN8+azkX6beii/ZF+enZJSOKo11ZSzGJjlNfJHfQtmQT8H+9TXPG2w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/parser': 7.27.2 + '@babel/types': 7.27.1 + '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/trace-mapping': 0.3.31 + jsesc: 3.1.0 + dev: true + + /@babel/generator@7.28.3: + resolution: {integrity: 
sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/parser': 7.28.4 + '@babel/types': 7.28.4 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + jsesc: 3.1.0 + dev: true + + /@babel/helper-compilation-targets@7.27.2: + resolution: {integrity: sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/compat-data': 7.27.2 + '@babel/helper-validator-option': 7.27.1 + browserslist: 4.24.5 + lru-cache: 5.1.1 + semver: 6.3.1 + dev: true + + /@babel/helper-globals@7.28.0: + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-module-imports@7.27.1: + resolution: {integrity: sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/traverse': 7.28.4 + '@babel/types': 7.28.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-module-transforms@7.27.1(@babel/core@7.27.1): + resolution: {integrity: sha512-9yHn519/8KvTU5BjTVEEeIM3w9/2yXNKoD82JifINImhpKkARMJKPP59kLo+BafpdN5zgNeIcS4jsGDmd3l58g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.27.1 + '@babel/helper-module-imports': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + '@babel/traverse': 7.27.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-module-transforms@7.28.3(@babel/core@7.28.4): + resolution: {integrity: sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-module-imports': 7.27.1 + 
'@babel/helper-validator-identifier': 7.27.1 + '@babel/traverse': 7.28.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-plugin-utils@7.27.1: + resolution: {integrity: sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-string-parser@7.27.1: + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-validator-identifier@7.27.1: + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-validator-option@7.27.1: + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helpers@7.27.1: + resolution: {integrity: sha512-FCvFTm0sWV8Fxhpp2McP5/W53GPllQ9QeQ7SiqGWjMf/LVG07lFa5+pgK05IRhVwtvafT22KF+ZSnM9I545CvQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.27.2 + '@babel/types': 7.27.1 + dev: true + + /@babel/helpers@7.28.4: + resolution: {integrity: sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.27.2 + '@babel/types': 7.28.4 + dev: true + + /@babel/parser@7.27.2: + resolution: {integrity: sha512-QYLs8299NA7WM/bZAdp+CviYYkVoYXlDW2rzliy3chxd1PQjej7JORuMJDJXJUb9g0TT+B99EwaVLKmX+sPXWw==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.27.1 + dev: true + + /@babel/parser@7.28.4: + resolution: {integrity: sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.28.4 + dev: true + + 
/@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.28.4): + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.28.4): + resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.28.4): + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.28.4): + resolution: {integrity: sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-import-attributes@7.27.1(@babel/core@7.28.4): + resolution: {integrity: sha512-oFT0FrKHgF53f4vOsZGi2Hh3I35PfSmVs4IBFLFj4dnafP+hIWDLg3VyKmUHfLoLHlyxY4C7DGtmHuJgn+IGww==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.28.4): + resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + 
'@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.28.4): + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-jsx@7.27.1(@babel/core@7.28.4): + resolution: {integrity: sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.28.4): + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.28.4): + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.28.4): + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.28.4): + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + 
'@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.28.4): + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.28.4): + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.28.4): + resolution: {integrity: sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.28.4): + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/plugin-syntax-typescript@7.27.1(@babel/core@7.28.4): + resolution: {integrity: sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/helper-plugin-utils': 7.27.1 + dev: true + + /@babel/runtime@7.27.6: + resolution: {integrity: sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==} + engines: {node: 
'>=6.9.0'} + dev: false + + /@babel/template@7.27.2: + resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/parser': 7.28.4 + '@babel/types': 7.28.4 + dev: true + + /@babel/traverse@7.27.1: + resolution: {integrity: sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.27.1 + '@babel/parser': 7.27.2 + '@babel/template': 7.27.2 + '@babel/types': 7.27.1 + debug: 4.4.0 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/traverse@7.28.4: + resolution: {integrity: sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.28.3 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.28.4 + '@babel/template': 7.27.2 + '@babel/types': 7.28.4 + debug: 4.4.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/types@7.27.1: + resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + dev: true + + /@babel/types@7.28.4: + resolution: {integrity: sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + dev: true + + /@bcoe/v8-coverage@0.2.3: + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + dev: true + + /@cspotcode/source-map-support@0.8.1: + resolution: 
{integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + dev: true + + /@emnapi/core@1.5.0: + resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} + requiresBuild: true + dependencies: + '@emnapi/wasi-threads': 1.1.0 + tslib: 2.6.2 + dev: true + optional: true + + /@emnapi/runtime@1.5.0: + resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} + requiresBuild: true + dependencies: + tslib: 2.6.2 + dev: true + optional: true + + /@emnapi/wasi-threads@1.1.0: + resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} + requiresBuild: true + dependencies: + tslib: 2.6.2 + dev: true + optional: true + + /@gar/promisify@1.1.3: + resolution: {integrity: sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==} + requiresBuild: true + dev: false + optional: true + + /@isaacs/cliui@8.0.2: + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + dependencies: + string-width: 5.1.2 + string-width-cjs: /string-width@4.2.3 + strip-ansi: 7.1.2 + strip-ansi-cjs: /strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: /wrap-ansi@7.0.0 + dev: true + + /@istanbuljs/load-nyc-config@1.1.0: + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} + dependencies: + camelcase: 5.3.1 + find-up: 4.1.0 + get-package-type: 0.1.0 + js-yaml: 3.14.1 + resolve-from: 5.0.0 + dev: true + + /@istanbuljs/schema@0.1.3: + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: 
'>=8'} + dev: true + + /@jest/console@30.2.0: + resolution: {integrity: sha512-+O1ifRjkvYIkBqASKWgLxrpEhQAAE7hY77ALLUufSk5717KfOShg6IbqLmdsLMPdUiFvA2kTs0R7YZy+l0IzZQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + chalk: 4.1.2 + jest-message-util: 30.2.0 + jest-util: 30.2.0 + slash: 3.0.0 + dev: true + + /@jest/core@30.2.0(ts-node@10.9.2): + resolution: {integrity: sha512-03W6IhuhjqTlpzh/ojut/pDB2LPRygyWX8ExpgHtQA8H/3K7+1vKmcINx5UzeOX1se6YEsBsOHQ1CRzf3fOwTQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/console': 30.2.0 + '@jest/pattern': 30.0.1 + '@jest/reporters': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + ci-info: 4.3.1 + exit-x: 0.2.2 + graceful-fs: 4.2.11 + jest-changed-files: 30.2.0 + jest-config: 30.2.0(@types/node@24.7.0)(ts-node@10.9.2) + jest-haste-map: 30.2.0 + jest-message-util: 30.2.0 + jest-regex-util: 30.0.1 + jest-resolve: 30.2.0 + jest-resolve-dependencies: 30.2.0 + jest-runner: 30.2.0 + jest-runtime: 30.2.0 + jest-snapshot: 30.2.0 + jest-util: 30.2.0 + jest-validate: 30.2.0 + jest-watcher: 30.2.0 + micromatch: 4.0.8 + pretty-format: 30.2.0 + slash: 3.0.0 + transitivePeerDependencies: + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + dev: true + + /@jest/diff-sequences@30.0.1: + resolution: {integrity: sha512-n5H8QLDJ47QqbCNn5SuFjCRDrOLEZ0h8vAHCK5RL9Ls7Xa8AQLa/YxAc9UjFqoEDM48muwtBGjtMY5cr0PLDCw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dev: true + + /@jest/environment@30.2.0: + resolution: {integrity: sha512-/QPTL7OBJQ5ac09UDRa3EQes4gt1FTEG/8jZ/4v5IVzx+Cv7dLxlVIvfvSVRiiX2drWyXeBjkMSR8hvOWSog5g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 
|| >=24.0.0} + dependencies: + '@jest/fake-timers': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + jest-mock: 30.2.0 + dev: true + + /@jest/expect-utils@30.2.0: + resolution: {integrity: sha512-1JnRfhqpD8HGpOmQp180Fo9Zt69zNtC+9lR+kT7NVL05tNXIi+QC8Csz7lfidMoVLPD3FnOtcmp0CEFnxExGEA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/get-type': 30.1.0 + dev: true + + /@jest/expect@30.2.0: + resolution: {integrity: sha512-V9yxQK5erfzx99Sf+7LbhBwNWEZ9eZay8qQ9+JSC0TrMR1pMDHLMY+BnVPacWU6Jamrh252/IKo4F1Xn/zfiqA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + expect: 30.2.0 + jest-snapshot: 30.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/fake-timers@30.2.0: + resolution: {integrity: sha512-HI3tRLjRxAbBy0VO8dqqm7Hb2mIa8d5bg/NJkyQcOk7V118ObQML8RC5luTF/Zsg4474a+gDvhce7eTnP4GhYw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/types': 30.2.0 + '@sinonjs/fake-timers': 13.0.5 + '@types/node': 24.7.0 + jest-message-util: 30.2.0 + jest-mock: 30.2.0 + jest-util: 30.2.0 + dev: true + + /@jest/get-type@30.1.0: + resolution: {integrity: sha512-eMbZE2hUnx1WV0pmURZY9XoXPkUYjpc55mb0CrhtdWLtzMQPFvu/rZkTLZFTsdaVQa+Tr4eWAteqcUzoawq/uA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dev: true + + /@jest/globals@30.2.0: + resolution: {integrity: sha512-b63wmnKPaK+6ZZfpYhz9K61oybvbI1aMcIs80++JI1O1rR1vaxHUCNqo3ITu6NU0d4V34yZFoHMn/uoKr/Rwfw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/environment': 30.2.0 + '@jest/expect': 30.2.0 + '@jest/types': 30.2.0 + jest-mock: 30.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/pattern@30.0.1: + resolution: {integrity: sha512-gWp7NfQW27LaBQz3TITS8L7ZCQ0TLvtmI//4OwlQRx4rnWxcPNIYjxZpDcN4+UlGxgm3jS5QPz8IPTCkb59wZA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@types/node': 24.7.0 + 
jest-regex-util: 30.0.1 + dev: true + + /@jest/reporters@30.2.0: + resolution: {integrity: sha512-DRyW6baWPqKMa9CzeiBjHwjd8XeAyco2Vt8XbcLFjiwCOEKOvy82GJ8QQnJE9ofsxCMPjH4MfH8fCWIHHDKpAQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@jest/console': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@jridgewell/trace-mapping': 0.3.31 + '@types/node': 24.7.0 + chalk: 4.1.2 + collect-v8-coverage: 1.0.2 + exit-x: 0.2.2 + glob: 10.4.5 + graceful-fs: 4.2.11 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-instrument: 6.0.3 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 5.0.6 + istanbul-reports: 3.2.0 + jest-message-util: 30.2.0 + jest-util: 30.2.0 + jest-worker: 30.2.0 + slash: 3.0.0 + string-length: 4.0.2 + v8-to-istanbul: 9.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/schemas@30.0.5: + resolution: {integrity: sha512-DmdYgtezMkh3cpU8/1uyXakv3tJRcmcXxBOcO0tbaozPwpmh4YMsnWrQm9ZmZMfa5ocbxzbFk6O4bDPEc/iAnA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@sinclair/typebox': 0.34.41 + dev: true + + /@jest/snapshot-utils@30.2.0: + resolution: {integrity: sha512-0aVxM3RH6DaiLcjj/b0KrIBZhSX1373Xci4l3cW5xiUWPctZ59zQ7jj4rqcJQ/Z8JuN/4wX3FpJSa3RssVvCug==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/types': 30.2.0 + chalk: 4.1.2 + graceful-fs: 4.2.11 + natural-compare: 1.4.0 + dev: true + + /@jest/source-map@30.0.1: + resolution: {integrity: sha512-MIRWMUUR3sdbP36oyNyhbThLHyJ2eEDClPCiHVbrYAe5g3CHRArIVpBw7cdSB5fr+ofSfIb2Tnsw8iEHL0PYQg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + callsites: 3.1.0 + graceful-fs: 4.2.11 + dev: true + + /@jest/test-result@30.2.0: + 
resolution: {integrity: sha512-RF+Z+0CCHkARz5HT9mcQCBulb1wgCP3FBvl9VFokMX27acKphwyQsNuWH3c+ojd1LeWBLoTYoxF0zm6S/66mjg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/console': 30.2.0 + '@jest/types': 30.2.0 + '@types/istanbul-lib-coverage': 2.0.6 + collect-v8-coverage: 1.0.2 + dev: true + + /@jest/test-sequencer@30.2.0: + resolution: {integrity: sha512-wXKgU/lk8fKXMu/l5Hog1R61bL4q5GCdT6OJvdAFz1P+QrpoFuLU68eoKuVc4RbrTtNnTL5FByhWdLgOPSph+Q==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/test-result': 30.2.0 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + slash: 3.0.0 + dev: true + + /@jest/transform@30.2.0: + resolution: {integrity: sha512-XsauDV82o5qXbhalKxD7p4TZYYdwcaEXC77PPD2HixEFF+6YGppjrAAQurTl2ECWcEomHBMMNS9AH3kcCFx8jA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@babel/core': 7.28.4 + '@jest/types': 30.2.0 + '@jridgewell/trace-mapping': 0.3.31 + babel-plugin-istanbul: 7.0.1 + chalk: 4.1.2 + convert-source-map: 2.0.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + jest-regex-util: 30.0.1 + jest-util: 30.2.0 + micromatch: 4.0.8 + pirates: 4.0.7 + slash: 3.0.0 + write-file-atomic: 5.0.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/types@30.2.0: + resolution: {integrity: sha512-H9xg1/sfVvyfU7o3zMfBEjQ1gcsdeTMgqHoYdN79tuLqfTtuu7WckRA1R5whDwOzxaZAeMKTYWqP+WCAi0CHsg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/pattern': 30.0.1 + '@jest/schemas': 30.0.5 + '@types/istanbul-lib-coverage': 2.0.6 + '@types/istanbul-reports': 3.0.4 + '@types/node': 24.7.0 + '@types/yargs': 17.0.33 + chalk: 4.1.2 + dev: true + + /@jridgewell/gen-mapping@0.3.13: + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + dependencies: + '@jridgewell/sourcemap-codec': 1.5.0 + 
'@jridgewell/trace-mapping': 0.3.31 + dev: true + + /@jridgewell/gen-mapping@0.3.8: + resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.2.1 + '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/trace-mapping': 0.3.31 + dev: true + + /@jridgewell/remapping@2.3.5: + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + dependencies: + '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/trace-mapping': 0.3.31 + dev: true + + /@jridgewell/resolve-uri@3.1.2: + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/set-array@1.2.1: + resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/sourcemap-codec@1.5.0: + resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} + dev: true + + /@jridgewell/trace-mapping@0.3.31: + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.0 + dev: true + + /@jridgewell/trace-mapping@0.3.9: + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.0 + dev: true + + /@mongodb-js/saslprep@1.3.1: + resolution: {integrity: sha512-6nZrq5kfAz0POWyhljnbWQQJQ5uT8oE2ddX303q1uY0tWsivWKgBDXBBvuFPwOqRRalXJuVO9EjOdVtuhLX0zg==} + dependencies: + sparse-bitfield: 3.0.3 + dev: false + + /@napi-rs/wasm-runtime@0.2.12: + resolution: 
{integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + requiresBuild: true + dependencies: + '@emnapi/core': 1.5.0 + '@emnapi/runtime': 1.5.0 + '@tybys/wasm-util': 0.10.1 + dev: true + optional: true + + /@npmcli/fs@1.1.1: + resolution: {integrity: sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==} + requiresBuild: true + dependencies: + '@gar/promisify': 1.1.3 + semver: 7.7.2 + dev: false + optional: true + + /@npmcli/move-file@1.1.2: + resolution: {integrity: sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==} + engines: {node: '>=10'} + deprecated: This functionality has been moved to @npmcli/fs + requiresBuild: true + dependencies: + mkdirp: 1.0.4 + rimraf: 3.0.2 + dev: false + optional: true + + /@pkgjs/parseargs@0.11.0: + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + requiresBuild: true + dev: true + optional: true + + /@pkgr/core@0.2.9: + resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + dev: true + + /@sinclair/typebox@0.34.41: + resolution: {integrity: sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==} + dev: true + + /@sinonjs/commons@3.0.1: + resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} + dependencies: + type-detect: 4.0.8 + dev: true + + /@sinonjs/fake-timers@13.0.5: + resolution: {integrity: sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==} + dependencies: + '@sinonjs/commons': 3.0.1 + dev: true + + /@tootallnate/once@1.1.2: + resolution: {integrity: 
sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==} + engines: {node: '>= 6'} + requiresBuild: true + dev: false + optional: true + + /@tsconfig/node10@1.0.12: + resolution: {integrity: sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==} + dev: true + + /@tsconfig/node12@1.0.11: + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + dev: true + + /@tsconfig/node14@1.0.3: + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + dev: true + + /@tsconfig/node16@1.0.4: + resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} + dev: true + + /@tybys/wasm-util@0.10.1: + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + requiresBuild: true + dependencies: + tslib: 2.6.2 + dev: true + optional: true + + /@types/babel__core@7.20.5: + resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} + dependencies: + '@babel/parser': 7.27.2 + '@babel/types': 7.27.1 + '@types/babel__generator': 7.27.0 + '@types/babel__template': 7.4.4 + '@types/babel__traverse': 7.20.7 + dev: true + + /@types/babel__generator@7.27.0: + resolution: {integrity: sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==} + dependencies: + '@babel/types': 7.27.1 + dev: true + + /@types/babel__template@7.4.4: + resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} + dependencies: + '@babel/parser': 7.27.2 + '@babel/types': 7.27.1 + dev: true + + /@types/babel__traverse@7.20.7: + resolution: {integrity: 
sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==} + dependencies: + '@babel/types': 7.27.1 + dev: true + + /@types/istanbul-lib-coverage@2.0.6: + resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} + dev: true + + /@types/istanbul-lib-report@3.0.3: + resolution: {integrity: sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==} + dependencies: + '@types/istanbul-lib-coverage': 2.0.6 + dev: true + + /@types/istanbul-reports@3.0.4: + resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + dependencies: + '@types/istanbul-lib-report': 3.0.3 + dev: true + + /@types/node@24.7.0: + resolution: {integrity: sha512-IbKooQVqUBrlzWTi79E8Fw78l8k1RNtlDDNWsFZs7XonuQSJ8oNYfEeclhprUldXISRMLzBpILuKgPlIxm+/Yw==} + dependencies: + undici-types: 7.14.0 + dev: true + + /@types/stack-utils@2.0.3: + resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} + dev: true + + /@types/webidl-conversions@7.0.3: + resolution: {integrity: sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==} + dev: false + + /@types/whatwg-url@11.0.5: + resolution: {integrity: sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ==} + dependencies: + '@types/webidl-conversions': 7.0.3 + dev: false + + /@types/yargs-parser@21.0.3: + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + dev: true + + /@types/yargs@17.0.33: + resolution: {integrity: sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==} + dependencies: + '@types/yargs-parser': 21.0.3 + dev: true + + /@ungap/structured-clone@1.3.0: + resolution: {integrity: 
sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + dev: true + + /@unrs/resolver-binding-android-arm-eabi@1.11.1: + resolution: {integrity: sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==} + cpu: [arm] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-android-arm64@1.11.1: + resolution: {integrity: sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-darwin-arm64@1.11.1: + resolution: {integrity: sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-darwin-x64@1.11.1: + resolution: {integrity: sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-freebsd-x64@1.11.1: + resolution: {integrity: sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1: + resolution: {integrity: sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-arm-musleabihf@1.11.1: + resolution: {integrity: sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-arm64-gnu@1.11.1: + resolution: {integrity: 
sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-arm64-musl@1.11.1: + resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-ppc64-gnu@1.11.1: + resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==} + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-riscv64-gnu@1.11.1: + resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==} + cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-riscv64-musl@1.11.1: + resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==} + cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-s390x-gnu@1.11.1: + resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==} + cpu: [s390x] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-x64-gnu@1.11.1: + resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-linux-x64-musl@1.11.1: + resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + 
/@unrs/resolver-binding-wasm32-wasi@1.11.1: + resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + requiresBuild: true + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + dev: true + optional: true + + /@unrs/resolver-binding-win32-arm64-msvc@1.11.1: + resolution: {integrity: sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-win32-ia32-msvc@1.11.1: + resolution: {integrity: sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@unrs/resolver-binding-win32-x64-msvc@1.11.1: + resolution: {integrity: sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /abbrev@1.1.1: + resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==} + requiresBuild: true + dev: false + optional: true + + /accepts@2.0.0: + resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} + engines: {node: '>= 0.6'} + dependencies: + mime-types: 3.0.2 + negotiator: 1.0.0 + dev: false + + /acorn-walk@8.3.4: + resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==} + engines: {node: '>=0.4.0'} + dependencies: + acorn: 8.15.0 + dev: true + + /acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /agent-base@6.0.2: + resolution: {integrity: 
sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + requiresBuild: true + dependencies: + debug: 4.4.0 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + + /agentkeepalive@4.6.0: + resolution: {integrity: sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==} + engines: {node: '>= 8.0.0'} + requiresBuild: true + dependencies: + humanize-ms: 1.2.1 + dev: false + optional: true + + /aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + requiresBuild: true + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + dev: false + optional: true + + /ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + dependencies: + type-fest: 0.21.3 + dev: true + + /ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + /ansi-regex@6.2.2: + resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + dev: true + + /ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + dependencies: + color-convert: 2.0.1 + dev: true + + /ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + dev: true + + /ansi-styles@6.2.3: + resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} + engines: {node: '>=12'} + dev: true + + 
/anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + dev: true + + /aproba@2.1.0: + resolution: {integrity: sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==} + requiresBuild: true + dev: false + optional: true + + /are-we-there-yet@3.0.1: + resolution: {integrity: sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==} + engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + deprecated: This package is no longer supported. + requiresBuild: true + dependencies: + delegates: 1.0.0 + readable-stream: 3.6.2 + dev: false + optional: true + + /arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + dev: true + + /argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + dependencies: + sprintf-js: 1.0.3 + dev: true + + /babel-jest@30.2.0(@babel/core@7.28.4): + resolution: {integrity: sha512-0YiBEOxWqKkSQWL9nNGGEgndoeL0ZpWrbLMNL5u/Kaxrli3Eaxlt3ZtIDktEvXt4L/R9r3ODr2zKwGM/2BjxVw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@babel/core': ^7.11.0 || ^8.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@jest/transform': 30.2.0 + '@types/babel__core': 7.20.5 + babel-plugin-istanbul: 7.0.1 + babel-preset-jest: 30.2.0(@babel/core@7.28.4) + chalk: 4.1.2 + graceful-fs: 4.2.11 + slash: 3.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-istanbul@7.0.1: + resolution: {integrity: sha512-D8Z6Qm8jCvVXtIRkBnqNHX0zJ37rQcFJ9u8WOS6tkYOsRdHBzypCstaxWiu5ZIlqQtviRYbgnRLSoCEvjqcqbA==} + engines: {node: '>=12'} + dependencies: + '@babel/helper-plugin-utils': 7.27.1 + '@istanbuljs/load-nyc-config': 1.1.0 + 
'@istanbuljs/schema': 0.1.3 + istanbul-lib-instrument: 6.0.3 + test-exclude: 6.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-jest-hoist@30.2.0: + resolution: {integrity: sha512-ftzhzSGMUnOzcCXd6WHdBGMyuwy15Wnn0iyyWGKgBDLxf9/s5ABuraCSpBX2uG0jUg4rqJnxsLc5+oYBqoxVaA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@types/babel__core': 7.20.5 + dev: true + + /babel-preset-current-node-syntax@1.2.0(@babel/core@7.28.4): + resolution: {integrity: sha512-E/VlAEzRrsLEb2+dv8yp3bo4scof3l9nR4lrld+Iy5NyVqgVYUJnDAmunkhPMisRI32Qc4iRiz425d8vM++2fg==} + peerDependencies: + '@babel/core': ^7.0.0 || ^8.0.0-0 + dependencies: + '@babel/core': 7.28.4 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.28.4) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.4) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.28.4) + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.28.4) + '@babel/plugin-syntax-import-attributes': 7.27.1(@babel/core@7.28.4) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.28.4) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.28.4) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.28.4) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.28.4) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.28.4) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.28.4) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.28.4) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.28.4) + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.28.4) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.28.4) + dev: true + + /babel-preset-jest@30.2.0(@babel/core@7.28.4): + resolution: {integrity: sha512-US4Z3NOieAQumwFnYdUWKvUKh8+YSnS/gB3t6YBiz0bskpu7Pine8pPCheNxlPEW4wnUkma2a94YuW2q3guvCQ==} + engines: {node: ^18.14.0 || 
^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@babel/core': ^7.11.0 || ^8.0.0-beta.1 + dependencies: + '@babel/core': 7.28.4 + babel-plugin-jest-hoist: 30.2.0 + babel-preset-current-node-syntax: 1.2.0(@babel/core@7.28.4) + dev: true + + /balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + /base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + dev: false + + /base64url@3.0.1: + resolution: {integrity: sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A==} + engines: {node: '>=6.0.0'} + dev: false + + /basic-auth@2.0.1: + resolution: {integrity: sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==} + engines: {node: '>= 0.8'} + dependencies: + safe-buffer: 5.1.2 + dev: false + + /bcrypt@6.0.0: + resolution: {integrity: sha512-cU8v/EGSrnH+HnxV2z0J7/blxH8gq7Xh2JFT6Aroax7UohdmiJJlxApMxtKfuI7z68NvvVcmR78k2LbT6efhRg==} + engines: {node: '>= 18'} + requiresBuild: true + dependencies: + node-addon-api: 8.5.0 + node-gyp-build: 4.8.4 + dev: false + + /bindings@1.5.0: + resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + dependencies: + file-uri-to-path: 1.0.0 + dev: false + + /bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + dev: false + + /body-parser@2.2.2: + resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} + engines: {node: '>=18'} + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 4.4.3 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + on-finished: 2.4.1 + qs: 6.14.1 + raw-body: 3.0.2 + type-is: 
2.0.1 + transitivePeerDependencies: + - supports-color + dev: false + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + requiresBuild: true + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + /brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + dependencies: + balanced-match: 1.0.2 + dev: true + + /braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + dependencies: + fill-range: 7.1.1 + dev: true + + /browserslist@4.24.5: + resolution: {integrity: sha512-FDToo4Wo82hIdgc1CQ+NQD0hEhmpPjrZ3hiUgwgOG6IuTdlpr8jdjyG24P6cNP1yJpTLzS5OcGgSw0xmDU1/Tw==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + dependencies: + caniuse-lite: 1.0.30001717 + electron-to-chromium: 1.5.151 + node-releases: 2.0.19 + update-browserslist-db: 1.1.3(browserslist@4.24.5) + dev: true + + /bser@2.1.1: + resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + dependencies: + node-int64: 0.4.0 + dev: true + + /bson@6.10.4: + resolution: {integrity: sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng==} + engines: {node: '>=16.20.1'} + dev: false + + /buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + dev: false + + /buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + dev: true + + /buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + dependencies: + base64-js: 
1.5.1 + ieee754: 1.2.1 + dev: false + + /bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + dev: false + + /cacache@15.3.0: + resolution: {integrity: sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==} + engines: {node: '>= 10'} + requiresBuild: true + dependencies: + '@npmcli/fs': 1.1.1 + '@npmcli/move-file': 1.1.2 + chownr: 2.0.0 + fs-minipass: 2.1.0 + glob: 7.2.3 + infer-owner: 1.0.4 + lru-cache: 6.0.0 + minipass: 3.3.6 + minipass-collect: 1.0.2 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + mkdirp: 1.0.4 + p-map: 4.0.0 + promise-inflight: 1.0.1 + rimraf: 3.0.2 + ssri: 8.0.1 + tar: 6.2.1 + unique-filename: 1.1.1 + transitivePeerDependencies: + - bluebird + dev: false + optional: true + + /call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + dev: false + + /call-bound@1.0.4: + resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + dev: false + + /callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + dev: true + + /camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + dev: true + + /camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + dev: true + + /caniuse-lite@1.0.30001717: + resolution: {integrity: 
sha512-auPpttCq6BDEG8ZAuHJIplGw6GODhjw+/11e7IjpnYCxZcW/ONgPs0KVBJ0d1bY3e2+7PRe5RCLyP+PfwVgkYw==} + dev: true + + /chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + dev: true + + /char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + dev: true + + /chownr@1.1.4: + resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} + dev: false + + /chownr@2.0.0: + resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==} + engines: {node: '>=10'} + dev: false + + /ci-info@4.3.1: + resolution: {integrity: sha512-Wdy2Igu8OcBpI2pZePZ5oWjPC38tmDVx5WKUXKwlLYkA0ozo85sLsLvkBbBn/sZaSCMFOGZJ14fvW9t5/d7kdA==} + engines: {node: '>=8'} + dev: true + + /cjs-module-lexer@2.1.0: + resolution: {integrity: sha512-UX0OwmYRYQQetfrLEZeewIFFI+wSTofC+pMBLNuH3RUuu/xzG1oz84UCEDOSoQlN3fZ4+AzmV50ZYvGqkMh9yA==} + dev: true + + /clean-stack@2.2.0: + resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + requiresBuild: true + dev: false + optional: true + + /cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: true + + /co@4.6.0: + resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} + engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + dev: true + + /collect-v8-coverage@1.0.2: + resolution: {integrity: 
sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==} + dev: true + + /color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + dependencies: + color-name: 1.1.4 + dev: true + + /color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + dev: true + + /color-support@1.1.3: + resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} + hasBin: true + requiresBuild: true + dev: false + optional: true + + /concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + requiresBuild: true + + /connect-sqlite3@0.9.16: + resolution: {integrity: sha512-2gqo0QmcBBL8p8+eqpBETn7RgM/PaoKvpQGl8PfjEgwlr0VuMYNMxRJRrRCo3KR3fxMYeSsCw2tGNG0JKN9Nvg==} + engines: {node: '>=0.4.x'} + dependencies: + sqlite3: 5.1.7 + transitivePeerDependencies: + - bluebird + - supports-color + dev: false + + /console-control-strings@1.1.0: + resolution: {integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==} + requiresBuild: true + dev: false + optional: true + + /content-disposition@1.0.1: + resolution: {integrity: sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==} + engines: {node: '>=18'} + dev: false + + /content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + dev: false + + /convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + dev: true + + /cookie-parser@1.4.7: + resolution: {integrity: 
sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==} + engines: {node: '>= 0.8.0'} + dependencies: + cookie: 0.7.2 + cookie-signature: 1.0.6 + dev: false + + /cookie-signature@1.0.6: + resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==} + dev: false + + /cookie-signature@1.0.7: + resolution: {integrity: sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==} + dev: false + + /cookie-signature@1.2.2: + resolution: {integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==} + engines: {node: '>=6.6.0'} + dev: false + + /cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + dev: false + + /cookie@1.0.2: + resolution: {integrity: sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==} + engines: {node: '>=18'} + dev: false + + /create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + dev: true + + /cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.0.0 + dev: false + + /debug@4.4.0: + resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' 
+ peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + + /debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + dev: false + + /decompress-response@6.0.0: + resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} + engines: {node: '>=10'} + dependencies: + mimic-response: 3.1.0 + dev: false + + /dedent@1.7.0: + resolution: {integrity: sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + dev: true + + /deep-equal@0.0.0: + resolution: {integrity: sha512-p1bI/kkDPT6auUI0U+WLuIIrzmDIDo80I406J8tT4y6I4ZGtBuMeTudrKDtBdMJFAcxqrQdx27gosqPVyY3IvQ==} + dev: false + + /deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + dev: false + + /deepmerge@4.3.1: + resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} + engines: {node: '>=0.10.0'} + dev: true + + /defined@0.0.0: + resolution: {integrity: sha512-zpqiCT8bODLu3QSmLLic8xJnYWBFjOSu/fBCm189oAiTtPq/PSanNACKZDS7kgSyCJY7P+IcODzlIogBK/9RBg==} + dev: false + + /delegates@1.0.0: + resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} + requiresBuild: true + dev: false + optional: true + + /depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + dev: false + + /detect-libc@2.1.2: + 
resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} + engines: {node: '>=8'} + dev: false + + /detect-newline@3.1.0: + resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} + engines: {node: '>=8'} + dev: true + + /diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + dev: true + + /dotenv@17.2.3: + resolution: {integrity: sha512-JVUnt+DUIzu87TABbhPmNfVdBDt18BLOWjMUFJMSi/Qqg7NTYtabbvSNJGOJ7afbRuv9D/lngizHtP7QyLQ+9w==} + engines: {node: '>=12'} + dev: false + + /dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + dev: false + + /eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + dev: true + + /ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + dependencies: + safe-buffer: 5.2.1 + dev: false + + /ee-first@1.1.1: + resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + dev: false + + /electron-to-chromium@1.5.151: + resolution: {integrity: sha512-Rl6uugut2l9sLojjS4H4SAr3A4IgACMLgpuEMPYCVcKydzfyPrn5absNRju38IhQOf/NwjJY8OGWjlteqYeBCA==} + dev: true + + /emittery@0.13.1: + resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} + engines: {node: '>=12'} + dev: true + + /emoji-regex@8.0.0: + resolution: {integrity: 
sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + /emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + dev: true + + /encodeurl@2.0.0: + resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} + engines: {node: '>= 0.8'} + dev: false + + /encoding@0.1.13: + resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==} + requiresBuild: true + dependencies: + iconv-lite: 0.6.3 + dev: false + optional: true + + /end-of-stream@1.4.5: + resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} + dependencies: + once: 1.4.0 + dev: false + + /env-paths@2.2.1: + resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==} + engines: {node: '>=6'} + requiresBuild: true + dev: false + optional: true + + /err-code@2.0.3: + resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} + requiresBuild: true + dev: false + optional: true + + /error-ex@1.3.4: + resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} + dependencies: + is-arrayish: 0.2.1 + dev: true + + /es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + dev: false + + /es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + dev: false + + /es-object-atoms@1.1.1: + resolution: {integrity: 
sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + dev: false + + /escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + dev: true + + /escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + dev: false + + /escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + dev: true + + /esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + dev: false + + /execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + dev: true + + /exit-x@0.2.2: + resolution: {integrity: sha512-+I6B/IkJc1o/2tiURyz/ivu/O0nKNEArIUB5O7zBrlDVJr22SCLH3xTeEry428LvFhRzIA1g8izguxJ/gbNcVQ==} + engines: {node: '>= 0.8.0'} + dev: true + + /expand-template@2.0.3: + resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} + engines: {node: '>=6'} + dev: false + + /expect@30.2.0: + resolution: {integrity: sha512-u/feCi0GPsI+988gU2FLcsHyAHTU0MX1Wg68NhAnN7z/+C5wqG+CY8J53N9ioe8RXgaoz0nBR/TYMf3AycUuPw==} + engines: 
{node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/expect-utils': 30.2.0 + '@jest/get-type': 30.1.0 + jest-matcher-utils: 30.2.0 + jest-message-util: 30.2.0 + jest-mock: 30.2.0 + jest-util: 30.2.0 + dev: true + + /express-session@1.18.2: + resolution: {integrity: sha512-SZjssGQC7TzTs9rpPDuUrR23GNZ9+2+IkA/+IJWmvQilTr5OSliEHGF+D9scbIpdC6yGtTI0/VhaHoVes2AN/A==} + engines: {node: '>= 0.8.0'} + dependencies: + cookie: 0.7.2 + cookie-signature: 1.0.7 + debug: 2.6.9 + depd: 2.0.0 + on-headers: 1.1.0 + parseurl: 1.3.3 + safe-buffer: 5.2.1 + uid-safe: 2.1.5 + transitivePeerDependencies: + - supports-color + dev: false + + /express@5.2.1: + resolution: {integrity: sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==} + engines: {node: '>= 18'} + dependencies: + accepts: 2.0.0 + body-parser: 2.2.2 + content-disposition: 1.0.1 + content-type: 1.0.5 + cookie: 0.7.2 + cookie-signature: 1.2.2 + debug: 4.4.0 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 2.1.1 + fresh: 2.0.0 + http-errors: 2.0.1 + merge-descriptors: 2.0.0 + mime-types: 3.0.2 + on-finished: 2.4.1 + once: 1.4.0 + parseurl: 1.3.3 + proxy-addr: 2.0.7 + qs: 6.14.1 + range-parser: 1.2.1 + router: 2.2.0 + send: 1.2.1 + serve-static: 2.2.1 + statuses: 2.0.2 + type-is: 2.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + dev: false + + /fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + dev: true + + /fb-watchman@2.0.2: + resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + dependencies: + bser: 2.1.1 + dev: true + + /file-uri-to-path@1.0.0: + resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} + dev: false + + /fill-range@7.1.1: + resolution: {integrity: 
sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + dependencies: + to-regex-range: 5.0.1 + dev: true + + /finalhandler@2.1.1: + resolution: {integrity: sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==} + engines: {node: '>= 18.0.0'} + dependencies: + debug: 4.4.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + dev: false + + /find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + dev: true + + /foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + dev: true + + /forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + dev: false + + /fresh@2.0.0: + resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} + engines: {node: '>= 0.8'} + dev: false + + /fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + dev: false + + /fs-minipass@2.1.0: + resolution: {integrity: sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==} + engines: {node: '>= 8'} + dependencies: + minipass: 3.3.6 + dev: false + + /fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + requiresBuild: true + + /fsevents@2.3.3: + resolution: {integrity: 
sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + dev: false + + /gauge@4.0.4: + resolution: {integrity: sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==} + engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + deprecated: This package is no longer supported. + requiresBuild: true + dependencies: + aproba: 2.1.0 + color-support: 1.1.3 + console-control-strings: 1.1.0 + has-unicode: 2.0.1 + signal-exit: 3.0.7 + string-width: 4.2.3 + strip-ansi: 6.0.1 + wide-align: 1.1.5 + dev: false + optional: true + + /gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + dev: true + + /get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + dev: true + + /get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + dev: false + + /get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} + dev: true + + /get-proto@1.0.1: + resolution: {integrity: 
sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + dev: false + + /get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + dev: true + + /github-from-package@0.0.0: + resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} + dev: false + + /glob@10.4.5: + resolution: {integrity: sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==} + hasBin: true + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + dev: true + + /glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + /global@2.0.7: + resolution: {integrity: sha512-uyLyozYI1KDVqDUaEPM/9qdQKtNRxmRXiZ6ENCCID6miO+TnXAVopshc0ym/KJTTK6lH3STKRdP3Vtmyl02n9w==} + dependencies: + min-document: 0.2.8 + process: 0.5.2 + dev: false + + /globals@11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + dev: true + + /gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + dev: false + + /graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + /has-flag@4.0.0: + resolution: {integrity: 
sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + dev: true + + /has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + dev: false + + /has-unicode@2.0.1: + resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==} + requiresBuild: true + dev: false + optional: true + + /hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + dependencies: + function-bind: 1.1.2 + dev: false + + /html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + dev: true + + /http-cache-semantics@4.2.0: + resolution: {integrity: sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==} + requiresBuild: true + dev: false + optional: true + + /http-errors@2.0.1: + resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} + engines: {node: '>= 0.8'} + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.2 + toidentifier: 1.0.1 + dev: false + + /http-proxy-agent@4.0.1: + resolution: {integrity: sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==} + engines: {node: '>= 6'} + requiresBuild: true + dependencies: + '@tootallnate/once': 1.1.2 + agent-base: 6.0.2 + debug: 4.4.0 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + + /https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + requiresBuild: true + dependencies: + 
agent-base: 6.0.2 + debug: 4.4.0 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + + /human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + dev: true + + /humanize-ms@1.2.1: + resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + requiresBuild: true + dependencies: + ms: 2.1.3 + dev: false + optional: true + + /iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + requiresBuild: true + dependencies: + safer-buffer: 2.1.2 + dev: false + optional: true + + /iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: false + + /ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: false + + /immediate@3.0.6: + resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} + dev: false + + /import-local@3.2.0: + resolution: {integrity: sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==} + engines: {node: '>=8'} + hasBin: true + dependencies: + pkg-dir: 4.2.0 + resolve-cwd: 3.0.0 + dev: true + + /imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + /indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + requiresBuild: true + dev: false + optional: true + + 
/infer-owner@1.0.4: + resolution: {integrity: sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==} + requiresBuild: true + dev: false + optional: true + + /inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. + requiresBuild: true + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + /inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + /ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + dev: false + + /ip-address@10.0.1: + resolution: {integrity: sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==} + engines: {node: '>= 12'} + requiresBuild: true + dev: false + optional: true + + /ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + dev: false + + /is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + dev: true + + /is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + /is-generator-fn@2.1.0: + resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} + engines: {node: '>=6'} + dev: true + + /is-lambda@1.0.1: + resolution: {integrity: 
sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} + requiresBuild: true + dev: false + optional: true + + /is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + dev: true + + /is-promise@4.0.0: + resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + dev: false + + /is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + dev: true + + /isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + requiresBuild: true + + /istanbul-lib-coverage@3.2.2: + resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} + dev: true + + /istanbul-lib-instrument@6.0.3: + resolution: {integrity: sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==} + engines: {node: '>=10'} + dependencies: + '@babel/core': 7.27.1 + '@babel/parser': 7.27.2 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.2 + semver: 7.7.2 + transitivePeerDependencies: + - supports-color + dev: true + + /istanbul-lib-report@3.0.1: + resolution: {integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} + engines: {node: '>=10'} + dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + dev: true + + /istanbul-lib-source-maps@5.0.6: + resolution: {integrity: sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==} + engines: {node: '>=10'} + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + debug: 4.4.0 + istanbul-lib-coverage: 3.2.2 + 
transitivePeerDependencies: + - supports-color + dev: true + + /istanbul-reports@3.2.0: + resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} + engines: {node: '>=8'} + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + dev: true + + /jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + dev: true + + /jest-changed-files@30.2.0: + resolution: {integrity: sha512-L8lR1ChrRnSdfeOvTrwZMlnWV8G/LLjQ0nG9MBclwWZidA2N5FviRki0Bvh20WRMOX31/JYvzdqTJrk5oBdydQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + execa: 5.1.1 + jest-util: 30.2.0 + p-limit: 3.1.0 + dev: true + + /jest-circus@30.2.0: + resolution: {integrity: sha512-Fh0096NC3ZkFx05EP2OXCxJAREVxj1BcW/i6EWqqymcgYKWjyyDpral3fMxVcHXg6oZM7iULer9wGRFvfpl+Tg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/environment': 30.2.0 + '@jest/expect': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + chalk: 4.1.2 + co: 4.6.0 + dedent: 1.7.0 + is-generator-fn: 2.1.0 + jest-each: 30.2.0 + jest-matcher-utils: 30.2.0 + jest-message-util: 30.2.0 + jest-runtime: 30.2.0 + jest-snapshot: 30.2.0 + jest-util: 30.2.0 + p-limit: 3.1.0 + pretty-format: 30.2.0 + pure-rand: 7.0.1 + slash: 3.0.0 + stack-utils: 2.0.6 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + dev: true + + /jest-cli@30.2.0(@types/node@24.7.0)(ts-node@10.9.2): + resolution: {integrity: sha512-Os9ukIvADX/A9sLt6Zse3+nmHtHaE6hqOsjQtNiugFTbKRHYIYtZXNGNK9NChseXy7djFPjndX1tL0sCTlfpAA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + 
dependencies: + '@jest/core': 30.2.0(ts-node@10.9.2) + '@jest/test-result': 30.2.0 + '@jest/types': 30.2.0 + chalk: 4.1.2 + exit-x: 0.2.2 + import-local: 3.2.0 + jest-config: 30.2.0(@types/node@24.7.0)(ts-node@10.9.2) + jest-util: 30.2.0 + jest-validate: 30.2.0 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + dev: true + + /jest-config@30.2.0(@types/node@24.7.0)(ts-node@10.9.2): + resolution: {integrity: sha512-g4WkyzFQVWHtu6uqGmQR4CQxz/CH3yDSlhzXMWzNjDx843gYjReZnMRanjRCq5XZFuQrGDxgUaiYWE8BRfVckA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + peerDependencies: + '@types/node': '*' + esbuild-register: '>=3.4.0' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + esbuild-register: + optional: true + ts-node: + optional: true + dependencies: + '@babel/core': 7.28.4 + '@jest/get-type': 30.1.0 + '@jest/pattern': 30.0.1 + '@jest/test-sequencer': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + babel-jest: 30.2.0(@babel/core@7.28.4) + chalk: 4.1.2 + ci-info: 4.3.1 + deepmerge: 4.3.1 + glob: 10.4.5 + graceful-fs: 4.2.11 + jest-circus: 30.2.0 + jest-docblock: 30.2.0 + jest-environment-node: 30.2.0 + jest-regex-util: 30.0.1 + jest-resolve: 30.2.0 + jest-runner: 30.2.0 + jest-util: 30.2.0 + jest-validate: 30.2.0 + micromatch: 4.0.8 + parse-json: 5.2.0 + pretty-format: 30.2.0 + slash: 3.0.0 + strip-json-comments: 3.1.1 + ts-node: 10.9.2(@types/node@24.7.0)(typescript@5.9.3) + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + dev: true + + /jest-diff@30.2.0: + resolution: {integrity: sha512-dQHFo3Pt4/NLlG5z4PxZ/3yZTZ1C7s9hveiOj+GCN+uT109NC2QgsoVZsVOAvbJ3RgKkvyLGXZV9+piDpWbm6A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/diff-sequences': 30.0.1 + '@jest/get-type': 30.1.0 + chalk: 4.1.2 + pretty-format: 30.2.0 + dev: true + + /jest-docblock@30.2.0: + resolution: 
{integrity: sha512-tR/FFgZKS1CXluOQzZvNH3+0z9jXr3ldGSD8bhyuxvlVUwbeLOGynkunvlTMxchC5urrKndYiwCFC0DLVjpOCA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + detect-newline: 3.1.0 + dev: true + + /jest-each@30.2.0: + resolution: {integrity: sha512-lpWlJlM7bCUf1mfmuqTA8+j2lNURW9eNafOy99knBM01i5CQeY5UH1vZjgT9071nDJac1M4XsbyI44oNOdhlDQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/get-type': 30.1.0 + '@jest/types': 30.2.0 + chalk: 4.1.2 + jest-util: 30.2.0 + pretty-format: 30.2.0 + dev: true + + /jest-environment-node@30.2.0: + resolution: {integrity: sha512-ElU8v92QJ9UrYsKrxDIKCxu6PfNj4Hdcktcn0JX12zqNdqWHB0N+hwOnnBBXvjLd2vApZtuLUGs1QSY+MsXoNA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/environment': 30.2.0 + '@jest/fake-timers': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + jest-mock: 30.2.0 + jest-util: 30.2.0 + jest-validate: 30.2.0 + dev: true + + /jest-haste-map@30.2.0: + resolution: {integrity: sha512-sQA/jCb9kNt+neM0anSj6eZhLZUIhQgwDt7cPGjumgLM4rXsfb9kpnlacmvZz3Q5tb80nS+oG/if+NBKrHC+Xw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + anymatch: 3.1.3 + fb-watchman: 2.0.2 + graceful-fs: 4.2.11 + jest-regex-util: 30.0.1 + jest-util: 30.2.0 + jest-worker: 30.2.0 + micromatch: 4.0.8 + walker: 1.0.8 + optionalDependencies: + fsevents: 2.3.3 + dev: true + + /jest-leak-detector@30.2.0: + resolution: {integrity: sha512-M6jKAjyzjHG0SrQgwhgZGy9hFazcudwCNovY/9HPIicmNSBuockPSedAP9vlPK6ONFJ1zfyH/M2/YYJxOz5cdQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/get-type': 30.1.0 + pretty-format: 30.2.0 + dev: true + + /jest-matcher-utils@30.2.0: + resolution: {integrity: sha512-dQ94Nq4dbzmUWkQ0ANAWS9tBRfqCrn0bV9AMYdOi/MHW726xn7eQmMeRTpX2ViC00bpNaWXq+7o4lIQ3AX13Hg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + 
dependencies: + '@jest/get-type': 30.1.0 + chalk: 4.1.2 + jest-diff: 30.2.0 + pretty-format: 30.2.0 + dev: true + + /jest-message-util@30.2.0: + resolution: {integrity: sha512-y4DKFLZ2y6DxTWD4cDe07RglV88ZiNEdlRfGtqahfbIjfsw1nMCPx49Uev4IA/hWn3sDKyAnSPwoYSsAEdcimw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@babel/code-frame': 7.27.1 + '@jest/types': 30.2.0 + '@types/stack-utils': 2.0.3 + chalk: 4.1.2 + graceful-fs: 4.2.11 + micromatch: 4.0.8 + pretty-format: 30.2.0 + slash: 3.0.0 + stack-utils: 2.0.6 + dev: true + + /jest-mock@30.2.0: + resolution: {integrity: sha512-JNNNl2rj4b5ICpmAcq+WbLH83XswjPbjH4T7yvGzfAGCPh1rw+xVNbtk+FnRslvt9lkCcdn9i1oAoKUuFsOxRw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + jest-util: 30.2.0 + dev: true + + /jest-pnp-resolver@1.2.3(jest-resolve@30.2.0): + resolution: {integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==} + engines: {node: '>=6'} + peerDependencies: + jest-resolve: '*' + peerDependenciesMeta: + jest-resolve: + optional: true + dependencies: + jest-resolve: 30.2.0 + dev: true + + /jest-regex-util@30.0.1: + resolution: {integrity: sha512-jHEQgBXAgc+Gh4g0p3bCevgRCVRkB4VB70zhoAE48gxeSr1hfUOsM/C2WoJgVL7Eyg//hudYENbm3Ne+/dRVVA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dev: true + + /jest-resolve-dependencies@30.2.0: + resolution: {integrity: sha512-xTOIGug/0RmIe3mmCqCT95yO0vj6JURrn1TKWlNbhiAefJRWINNPgwVkrVgt/YaerPzY3iItufd80v3lOrFJ2w==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + jest-regex-util: 30.0.1 + jest-snapshot: 30.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-resolve@30.2.0: + resolution: {integrity: sha512-TCrHSxPlx3tBY3hWNtRQKbtgLhsXa1WmbJEqBlTBrGafd5fiQFByy2GNCEoGR+Tns8d15GaL9cxEzKOO3GEb2A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + 
dependencies: + chalk: 4.1.2 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + jest-pnp-resolver: 1.2.3(jest-resolve@30.2.0) + jest-util: 30.2.0 + jest-validate: 30.2.0 + slash: 3.0.0 + unrs-resolver: 1.11.1 + dev: true + + /jest-runner@30.2.0: + resolution: {integrity: sha512-PqvZ2B2XEyPEbclp+gV6KO/F1FIFSbIwewRgmROCMBo/aZ6J1w8Qypoj2pEOcg3G2HzLlaP6VUtvwCI8dM3oqQ==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/console': 30.2.0 + '@jest/environment': 30.2.0 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + chalk: 4.1.2 + emittery: 0.13.1 + exit-x: 0.2.2 + graceful-fs: 4.2.11 + jest-docblock: 30.2.0 + jest-environment-node: 30.2.0 + jest-haste-map: 30.2.0 + jest-leak-detector: 30.2.0 + jest-message-util: 30.2.0 + jest-resolve: 30.2.0 + jest-runtime: 30.2.0 + jest-util: 30.2.0 + jest-watcher: 30.2.0 + jest-worker: 30.2.0 + p-limit: 3.1.0 + source-map-support: 0.5.13 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-runtime@30.2.0: + resolution: {integrity: sha512-p1+GVX/PJqTucvsmERPMgCPvQJpFt4hFbM+VN3n8TMo47decMUcJbt+rgzwrEme0MQUA/R+1de2axftTHkKckg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/environment': 30.2.0 + '@jest/fake-timers': 30.2.0 + '@jest/globals': 30.2.0 + '@jest/source-map': 30.0.1 + '@jest/test-result': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + chalk: 4.1.2 + cjs-module-lexer: 2.1.0 + collect-v8-coverage: 1.0.2 + glob: 10.4.5 + graceful-fs: 4.2.11 + jest-haste-map: 30.2.0 + jest-message-util: 30.2.0 + jest-mock: 30.2.0 + jest-regex-util: 30.0.1 + jest-resolve: 30.2.0 + jest-snapshot: 30.2.0 + jest-util: 30.2.0 + slash: 3.0.0 + strip-bom: 4.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-snapshot@30.2.0: + resolution: {integrity: 
sha512-5WEtTy2jXPFypadKNpbNkZ72puZCa6UjSr/7djeecHWOu7iYhSXSnHScT8wBz3Rn8Ena5d5RYRcsyKIeqG1IyA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@babel/core': 7.28.4 + '@babel/generator': 7.28.3 + '@babel/plugin-syntax-jsx': 7.27.1(@babel/core@7.28.4) + '@babel/plugin-syntax-typescript': 7.27.1(@babel/core@7.28.4) + '@babel/types': 7.28.4 + '@jest/expect-utils': 30.2.0 + '@jest/get-type': 30.1.0 + '@jest/snapshot-utils': 30.2.0 + '@jest/transform': 30.2.0 + '@jest/types': 30.2.0 + babel-preset-current-node-syntax: 1.2.0(@babel/core@7.28.4) + chalk: 4.1.2 + expect: 30.2.0 + graceful-fs: 4.2.11 + jest-diff: 30.2.0 + jest-matcher-utils: 30.2.0 + jest-message-util: 30.2.0 + jest-util: 30.2.0 + pretty-format: 30.2.0 + semver: 7.7.2 + synckit: 0.11.11 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-util@30.2.0: + resolution: {integrity: sha512-QKNsM0o3Xe6ISQU869e+DhG+4CK/48aHYdJZGlFQVTjnbvgpcKyxpzk29fGiO7i/J8VENZ+d2iGnSsvmuHywlA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + chalk: 4.1.2 + ci-info: 4.3.1 + graceful-fs: 4.2.11 + picomatch: 4.0.2 + dev: true + + /jest-validate@30.2.0: + resolution: {integrity: sha512-FBGWi7dP2hpdi8nBoWxSsLvBFewKAg0+uSQwBaof4Y4DPgBabXgpSYC5/lR7VmnIlSpASmCi/ntRWPbv7089Pw==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/get-type': 30.1.0 + '@jest/types': 30.2.0 + camelcase: 6.3.0 + chalk: 4.1.2 + leven: 3.1.0 + pretty-format: 30.2.0 + dev: true + + /jest-watcher@30.2.0: + resolution: {integrity: sha512-PYxa28dxJ9g777pGm/7PrbnMeA0Jr7osHP9bS7eJy9DuAjMgdGtxgf0uKMyoIsTWAkIbUW5hSDdJ3urmgXBqxg==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/test-result': 30.2.0 + '@jest/types': 30.2.0 + '@types/node': 24.7.0 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.13.1 + jest-util: 30.2.0 + string-length: 4.0.2 + dev: true + + 
/jest-worker@30.2.0: + resolution: {integrity: sha512-0Q4Uk8WF7BUwqXHuAjc23vmopWJw5WH7w2tqBoUOZpOjW/ZnR44GXXd1r82RvnmI2GZge3ivrYXk/BE2+VtW2g==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@types/node': 24.7.0 + '@ungap/structured-clone': 1.3.0 + jest-util: 30.2.0 + merge-stream: 2.0.0 + supports-color: 8.1.1 + dev: true + + /jest@30.2.0(@types/node@24.7.0)(ts-node@10.9.2): + resolution: {integrity: sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 30.2.0(ts-node@10.9.2) + '@jest/types': 30.2.0 + import-local: 3.2.0 + jest-cli: 30.2.0(@types/node@24.7.0)(ts-node@10.9.2) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - esbuild-register + - supports-color + - ts-node + dev: true + + /js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + dev: true + + /js-yaml@3.14.1: + resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + hasBin: true + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + dev: true + + /jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + dev: true + + /json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + dev: true + + /json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + dev: true + + /jsonify@0.0.1: + 
resolution: {integrity: sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg==} + dev: false + + /jsonwebtoken@9.0.2: + resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + engines: {node: '>=12', npm: '>=6'} + dependencies: + jws: 3.2.2 + lodash.includes: 4.3.0 + lodash.isboolean: 3.0.3 + lodash.isinteger: 4.0.4 + lodash.isnumber: 3.0.3 + lodash.isplainobject: 4.0.6 + lodash.isstring: 4.0.1 + lodash.once: 4.1.1 + ms: 2.1.3 + semver: 7.7.2 + dev: false + + /jwa@1.4.2: + resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==} + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + dev: false + + /jws@3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + dependencies: + jwa: 1.4.2 + safe-buffer: 5.2.1 + dev: false + + /kareem@2.6.3: + resolution: {integrity: sha512-C3iHfuGUXK2u8/ipq9LfjFfXFxAZMQJJq7vLS45r3D9Y2xQ/m4S8zaR4zMLFWh9AsNPXmcFfUDhTEO8UIC/V6Q==} + engines: {node: '>=12.0.0'} + dev: false + + /leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + dev: true + + /lie@3.1.1: + resolution: {integrity: sha512-RiNhHysUjhrDQntfYSfY4MU24coXXdEOgw9WGcKHNeEwffDYbF//u87M1EWaMGzuFoSbqW0C9C6lEEhDOAswfw==} + dependencies: + immediate: 3.0.6 + dev: false + + /lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + dev: true + + /localforage@1.10.0: + resolution: {integrity: sha512-14/H1aX7hzBBmmh7sGPd+AOMkkIrHM3Z1PAyGgZigA1H1p5O5ANnMyWzvpAETtG68/dC4pC0ncy3+PPGzXZHPg==} + dependencies: + lie: 3.1.1 + dev: false + + /locate-path@5.0.0: + resolution: {integrity: 
sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + dependencies: + p-locate: 4.1.0 + dev: true + + /lodash.includes@4.3.0: + resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + dev: false + + /lodash.isboolean@3.0.3: + resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} + dev: false + + /lodash.isinteger@4.0.4: + resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} + dev: false + + /lodash.isnumber@3.0.3: + resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==} + dev: false + + /lodash.isplainobject@4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + dev: false + + /lodash.isstring@4.0.1: + resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + dev: false + + /lodash.once@4.1.1: + resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} + dev: false + + /lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + dev: true + + /lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + dependencies: + yallist: 3.1.1 + dev: true + + /lru-cache@6.0.0: + resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} + engines: {node: '>=10'} + requiresBuild: true + dependencies: + yallist: 4.0.0 + dev: false + optional: true + + /make-dir@4.0.0: + resolution: {integrity: 
sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} + dependencies: + semver: 7.7.2 + dev: true + + /make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + dev: true + + /make-fetch-happen@9.1.0: + resolution: {integrity: sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==} + engines: {node: '>= 10'} + requiresBuild: true + dependencies: + agentkeepalive: 4.6.0 + cacache: 15.3.0 + http-cache-semantics: 4.2.0 + http-proxy-agent: 4.0.1 + https-proxy-agent: 5.0.1 + is-lambda: 1.0.1 + lru-cache: 6.0.0 + minipass: 3.3.6 + minipass-collect: 1.0.2 + minipass-fetch: 1.4.1 + minipass-flush: 1.0.5 + minipass-pipeline: 1.2.4 + negotiator: 0.6.4 + promise-retry: 2.0.1 + socks-proxy-agent: 6.2.1 + ssri: 8.0.1 + transitivePeerDependencies: + - bluebird + - supports-color + dev: false + optional: true + + /makeerror@1.0.12: + resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} + dependencies: + tmpl: 1.0.5 + dev: true + + /match-sorter@8.1.0: + resolution: {integrity: sha512-0HX3BHPixkbECX+Vt7nS1vJ6P2twPgGTU3PMXjWrl1eyVCL24tFHeyYN1FN5RKLzve0TyzNI9qntqQGbebnfPQ==} + dependencies: + '@babel/runtime': 7.27.6 + remove-accents: 0.5.0 + dev: false + + /math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + dev: false + + /media-typer@1.1.0: + resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==} + engines: {node: '>= 0.8'} + dev: false + + /memory-pager@1.5.0: + resolution: {integrity: sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==} + dev: false + + /merge-descriptors@2.0.0: + 
resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==} + engines: {node: '>=18'} + dev: false + + /merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + dev: true + + /micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + dev: true + + /mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@3.0.2: + resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} + engines: {node: '>=18'} + dependencies: + mime-db: 1.54.0 + dev: false + + /mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + dev: true + + /mimic-response@3.1.0: + resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} + engines: {node: '>=10'} + dev: false + + /min-document@0.2.8: + resolution: {integrity: sha512-ri+szIofvuuGNJyf9sSpZNflnMoB1Cc+1B/yEksib+YXF/9CdnKABVFMk0n4+RTXP5bRSp8kP5NbpdMHGmabbw==} + dependencies: + tape: 1.0.4 + dev: false + + /minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + + /minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + dependencies: + brace-expansion: 2.0.2 + dev: true + + /minimist@1.2.8: + resolution: {integrity: 
sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + dev: false + + /minipass-collect@1.0.2: + resolution: {integrity: sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==} + engines: {node: '>= 8'} + requiresBuild: true + dependencies: + minipass: 3.3.6 + dev: false + optional: true + + /minipass-fetch@1.4.1: + resolution: {integrity: sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==} + engines: {node: '>=8'} + requiresBuild: true + dependencies: + minipass: 3.3.6 + minipass-sized: 1.0.3 + minizlib: 2.1.2 + optionalDependencies: + encoding: 0.1.13 + dev: false + optional: true + + /minipass-flush@1.0.5: + resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==} + engines: {node: '>= 8'} + requiresBuild: true + dependencies: + minipass: 3.3.6 + dev: false + optional: true + + /minipass-pipeline@1.2.4: + resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==} + engines: {node: '>=8'} + requiresBuild: true + dependencies: + minipass: 3.3.6 + dev: false + optional: true + + /minipass-sized@1.0.3: + resolution: {integrity: sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==} + engines: {node: '>=8'} + requiresBuild: true + dependencies: + minipass: 3.3.6 + dev: false + optional: true + + /minipass@3.3.6: + resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} + engines: {node: '>=8'} + dependencies: + yallist: 4.0.0 + dev: false + + /minipass@5.0.0: + resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==} + engines: {node: '>=8'} + dev: false + + /minipass@7.1.2: + resolution: {integrity: 
sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + dev: true + + /minizlib@2.1.2: + resolution: {integrity: sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==} + engines: {node: '>= 8'} + dependencies: + minipass: 3.3.6 + yallist: 4.0.0 + dev: false + + /mkdirp-classic@0.5.3: + resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} + dev: false + + /mkdirp@1.0.4: + resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==} + engines: {node: '>=10'} + hasBin: true + dev: false + + /mongodb-connection-string-url@3.0.2: + resolution: {integrity: sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==} + dependencies: + '@types/whatwg-url': 11.0.5 + whatwg-url: 14.2.0 + dev: false + + /mongodb@6.20.0: + resolution: {integrity: sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ==} + engines: {node: '>=16.20.1'} + peerDependencies: + '@aws-sdk/credential-providers': ^3.188.0 + '@mongodb-js/zstd': ^1.1.0 || ^2.0.0 + gcp-metadata: ^5.2.0 + kerberos: ^2.0.1 + mongodb-client-encryption: '>=6.0.0 <7' + snappy: ^7.3.2 + socks: ^2.7.1 + peerDependenciesMeta: + '@aws-sdk/credential-providers': + optional: true + '@mongodb-js/zstd': + optional: true + gcp-metadata: + optional: true + kerberos: + optional: true + mongodb-client-encryption: + optional: true + snappy: + optional: true + socks: + optional: true + dependencies: + '@mongodb-js/saslprep': 1.3.1 + bson: 6.10.4 + mongodb-connection-string-url: 3.0.2 + dev: false + + /mongoose@8.19.1: + resolution: {integrity: sha512-oB7hGQJn4f8aebqE7mhE54EReb5cxVgpCxQCQj0K/cK3q4J3Tg08nFP6sM52nJ4Hlm8jsDnhVYpqIITZUAhckQ==} + engines: {node: '>=16.20.1'} + dependencies: + bson: 6.10.4 + kareem: 2.6.3 + 
mongodb: 6.20.0 + mpath: 0.9.0 + mquery: 5.0.0 + ms: 2.1.3 + sift: 17.1.3 + transitivePeerDependencies: + - '@aws-sdk/credential-providers' + - '@mongodb-js/zstd' + - gcp-metadata + - kerberos + - mongodb-client-encryption + - snappy + - socks + - supports-color + dev: false + + /morgan@1.10.1: + resolution: {integrity: sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==} + engines: {node: '>= 0.8.0'} + dependencies: + basic-auth: 2.0.1 + debug: 2.6.9 + depd: 2.0.0 + on-finished: 2.3.0 + on-headers: 1.1.0 + transitivePeerDependencies: + - supports-color + dev: false + + /mpath@0.9.0: + resolution: {integrity: sha512-ikJRQTk8hw5DEoFVxHG1Gn9T/xcjtdnOKIU1JTmGjZZlg9LST2mBLmcX3/ICIbgJydT2GOc15RnNy5mHmzfSew==} + engines: {node: '>=4.0.0'} + dev: false + + /mquery@5.0.0: + resolution: {integrity: sha512-iQMncpmEK8R8ncT8HJGsGc9Dsp8xcgYMVSbs5jgnm1lFHTZqMJTUWTDx1LBO8+mK3tPNZWFLBghQEIOULSTHZg==} + engines: {node: '>=14.0.0'} + dependencies: + debug: 4.4.0 + transitivePeerDependencies: + - supports-color + dev: false + + /ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + dev: false + + /ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + /napi-build-utils@2.0.0: + resolution: {integrity: sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==} + dev: false + + /napi-postinstall@0.3.4: + resolution: {integrity: sha512-PHI5f1O0EP5xJ9gQmFGMS6IZcrVvTjpXjz7Na41gTE7eE2hK11lg04CECCYEEjdc17EV4DO+fkGEtt7TpTaTiQ==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + hasBin: true + dev: true + + /natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + dev: true + + /negotiator@0.6.4: + resolution: {integrity: 
sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==} + engines: {node: '>= 0.6'} + requiresBuild: true + dev: false + optional: true + + /negotiator@1.0.0: + resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} + engines: {node: '>= 0.6'} + dev: false + + /node-abi@3.78.0: + resolution: {integrity: sha512-E2wEyrgX/CqvicaQYU3Ze1PFGjc4QYPGsjUrlYkqAE0WjHEZwgOsGMPMzkMse4LjJbDmaEuDX3CM036j5K2DSQ==} + engines: {node: '>=10'} + dependencies: + semver: 7.7.2 + dev: false + + /node-addon-api@7.1.1: + resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} + dev: false + + /node-addon-api@8.5.0: + resolution: {integrity: sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A==} + engines: {node: ^18 || ^20 || >= 21} + dev: false + + /node-gyp-build@4.8.4: + resolution: {integrity: sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==} + hasBin: true + dev: false + + /node-gyp@8.4.1: + resolution: {integrity: sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==} + engines: {node: '>= 10.12.0'} + hasBin: true + requiresBuild: true + dependencies: + env-paths: 2.2.1 + glob: 7.2.3 + graceful-fs: 4.2.11 + make-fetch-happen: 9.1.0 + nopt: 5.0.0 + npmlog: 6.0.2 + rimraf: 3.0.2 + semver: 7.7.2 + tar: 6.2.1 + which: 2.0.2 + transitivePeerDependencies: + - bluebird + - supports-color + dev: false + optional: true + + /node-int64@0.4.0: + resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} + dev: true + + /node-releases@2.0.19: + resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} + dev: true + + /nopt@5.0.0: + resolution: {integrity: 
sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==} + engines: {node: '>=6'} + hasBin: true + requiresBuild: true + dependencies: + abbrev: 1.1.1 + dev: false + optional: true + + /normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + dev: true + + /npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + dependencies: + path-key: 3.1.1 + dev: true + + /npmlog@6.0.2: + resolution: {integrity: sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==} + engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + deprecated: This package is no longer supported. + requiresBuild: true + dependencies: + are-we-there-yet: 3.0.1 + console-control-strings: 1.1.0 + gauge: 4.0.4 + set-blocking: 2.0.0 + dev: false + optional: true + + /oauth@0.10.2: + resolution: {integrity: sha512-JtFnB+8nxDEXgNyniwz573xxbKSOu3R8D40xQKqcjwJ2CDkYqUDI53o6IuzDJBx60Z8VKCm271+t8iFjakrl8Q==} + dev: false + + /object-inspect@1.13.4: + resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} + engines: {node: '>= 0.4'} + dev: false + + /omicron@0.2.0: + resolution: {integrity: sha512-pv4/a3GWzcw7z3xavdjsNGRRVHJ2SjwsU080MoK6RJHwrU8Y5eQXBd9E3Tyx0/hJtQtSPSlEgiyi4UkW+62Zqg==} + dev: false + + /on-finished@2.3.0: + resolution: {integrity: sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww==} + engines: {node: '>= 0.8'} + dependencies: + ee-first: 1.1.1 + dev: false + + /on-finished@2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + dependencies: + ee-first: 1.1.1 + dev: false + + 
/on-headers@1.1.0: + resolution: {integrity: sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==} + engines: {node: '>= 0.8'} + dev: false + + /once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + dependencies: + wrappy: 1.0.2 + + /onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + dependencies: + mimic-fn: 2.1.0 + dev: true + + /p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + dependencies: + p-try: 2.2.0 + dev: true + + /p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + dependencies: + yocto-queue: 0.1.0 + dev: true + + /p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + dependencies: + p-limit: 2.3.0 + dev: true + + /p-map@4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} + requiresBuild: true + dependencies: + aggregate-error: 3.1.0 + dev: false + optional: true + + /p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + dev: true + + /package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + dev: true + + /parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + 
dependencies: + '@babel/code-frame': 7.27.1 + error-ex: 1.3.4 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + dev: true + + /parseurl@1.3.3: + resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + dev: false + + /passport-facebook@3.0.0: + resolution: {integrity: sha512-K/qNzuFsFISYAyC1Nma4qgY/12V3RSLFdFVsPKXiKZt434wOvthFW1p7zKa1iQihQMRhaWorVE1o3Vi1o+ZgeQ==} + engines: {node: '>= 0.4.0'} + dependencies: + passport-oauth2: 1.8.0 + dev: false + + /passport-oauth2@1.8.0: + resolution: {integrity: sha512-cjsQbOrXIDE4P8nNb3FQRCCmJJ/utnFKEz2NX209f7KOHPoX18gF7gBzBbLLsj2/je4KrgiwLLGjf0lm9rtTBA==} + engines: {node: '>= 0.4.0'} + dependencies: + base64url: 3.0.1 + oauth: 0.10.2 + passport-strategy: 1.0.0 + uid2: 0.0.4 + utils-merge: 1.0.1 + dev: false + + /passport-strategy@1.0.0: + resolution: {integrity: sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA==} + engines: {node: '>= 0.4.0'} + dev: false + + /passport@0.7.0: + resolution: {integrity: sha512-cPLl+qZpSc+ireUvt+IzqbED1cHHkDoVYMo30jbJIdOOjQ1MQYZBPiNvmi8UM6lJuOpTPXJGZQk0DtC4y61MYQ==} + engines: {node: '>= 0.4.0'} + dependencies: + passport-strategy: 1.0.0 + pause: 0.0.1 + utils-merge: 1.0.1 + dev: false + + /path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + dev: true + + /path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + requiresBuild: true + + /path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + dev: true + + /path-scurry@1.11.1: + resolution: {integrity: 
sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + dev: true + + /path-to-regexp@8.3.0: + resolution: {integrity: sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==} + dev: false + + /pause@0.0.1: + resolution: {integrity: sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg==} + dev: false + + /picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + dev: true + + /picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + dev: true + + /picomatch@4.0.2: + resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + engines: {node: '>=12'} + dev: true + + /pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + dev: true + + /pkg-dir@4.2.0: + resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} + engines: {node: '>=8'} + dependencies: + find-up: 4.1.0 + dev: true + + /pluralize@8.0.0: + resolution: {integrity: sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==} + engines: {node: '>=4'} + dev: false + + /prebuild-install@7.1.3: + resolution: {integrity: sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==} + engines: {node: '>=10'} + hasBin: true + dependencies: + detect-libc: 2.1.2 + expand-template: 2.0.3 + github-from-package: 0.0.0 + minimist: 1.2.8 + mkdirp-classic: 0.5.3 + napi-build-utils: 2.0.0 + node-abi: 3.78.0 + 
pump: 3.0.3 + rc: 1.2.8 + simple-get: 4.0.1 + tar-fs: 2.1.4 + tunnel-agent: 0.6.0 + dev: false + + /pretty-format@30.2.0: + resolution: {integrity: sha512-9uBdv/B4EefsuAL+pWqueZyZS2Ba+LxfFeQ9DN14HU4bN8bhaxKdkpjpB6fs9+pSjIBu+FXQHImEg8j/Lw0+vA==} + engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0} + dependencies: + '@jest/schemas': 30.0.5 + ansi-styles: 5.2.0 + react-is: 18.3.1 + dev: true + + /process@0.5.2: + resolution: {integrity: sha512-oNpcutj+nYX2FjdEW7PGltWhXulAnFlM0My/k48L90hARCOJtvBbQXc/6itV2jDvU5xAAtonP+r6wmQgCcbAUA==} + engines: {node: '>= 0.6.0'} + dev: false + + /promise-inflight@1.0.1: + resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + requiresBuild: true + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + dev: false + optional: true + + /promise-retry@2.0.1: + resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==} + engines: {node: '>=10'} + requiresBuild: true + dependencies: + err-code: 2.0.3 + retry: 0.12.0 + dev: false + optional: true + + /proxy-addr@2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + dev: false + + /pump@3.0.3: + resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==} + dependencies: + end-of-stream: 1.4.5 + once: 1.4.0 + dev: false + + /punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + dev: false + + /pure-rand@7.0.1: + resolution: {integrity: sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==} + dev: true + + /python@0.0.4: + resolution: {integrity: 
sha512-7avKA/6XxrwcGSDes8xGn7FHAUdAUQXKHtpjDulyv5/nm7TcPblmPRvXjjwx5knWHqeRiipqH/TZR2HhmJ4CGQ==} + engines: {node: '>= 0.4.1'} + dev: false + + /qs@6.14.1: + resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==} + engines: {node: '>=0.6'} + dependencies: + side-channel: 1.1.0 + dev: false + + /random-bytes@1.0.0: + resolution: {integrity: sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==} + engines: {node: '>= 0.8'} + dev: false + + /range-parser@1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + dev: false + + /raw-body@3.0.2: + resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==} + engines: {node: '>= 0.10'} + dependencies: + bytes: 3.1.2 + http-errors: 2.0.1 + iconv-lite: 0.7.2 + unpipe: 1.0.0 + dev: false + + /rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + dev: false + + /react-dom@19.1.0(react@19.2.0): + resolution: {integrity: sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==} + peerDependencies: + react: ^19.1.0 + dependencies: + react: 19.2.0 + scheduler: 0.26.0 + dev: false + + /react-is@18.3.1: + resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} + dev: true + + /react-router-dom@7.9.3(react-dom@19.1.0)(react@19.2.0): + resolution: {integrity: sha512-1QSbA0TGGFKTAc/aWjpfW/zoEukYfU4dc1dLkT/vvf54JoGMkW+fNA+3oyo2gWVW1GM7BxjJVHz5GnPJv40rvg==} + engines: {node: '>=20.0.0'} + peerDependencies: + react: '>=18' + react-dom: '>=18' + dependencies: + react: 19.2.0 + 
react-dom: 19.1.0(react@19.2.0) + react-router: 7.9.3(react-dom@19.1.0)(react@19.2.0) + dev: false + + /react-router@7.9.3(react-dom@19.1.0)(react@19.2.0): + resolution: {integrity: sha512-4o2iWCFIwhI/eYAIL43+cjORXYn/aRQPgtFRRZb3VzoyQ5Uej0Bmqj7437L97N9NJW4wnicSwLOLS+yCXfAPgg==} + engines: {node: '>=20.0.0'} + peerDependencies: + react: '>=18' + react-dom: '>=18' + peerDependenciesMeta: + react-dom: + optional: true + dependencies: + cookie: 1.0.2 + react: 19.2.0 + react-dom: 19.1.0(react@19.2.0) + set-cookie-parser: 2.7.1 + dev: false + + /react@19.2.0: + resolution: {integrity: sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==} + engines: {node: '>=0.10.0'} + dev: false + + /readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + dev: false + + /remove-accents@0.5.0: + resolution: {integrity: sha512-8g3/Otx1eJaVD12e31UbJj1YzdtVvzH85HV7t+9MJYk/u3XmkOUJ5Ys9wQrf9PCPK8+xn4ymzqYCiZl6QWKn+A==} + dev: false + + /require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + dev: true + + /resolve-cwd@3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} + dependencies: + resolve-from: 5.0.0 + dev: true + + /resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + dev: true + + /retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + requiresBuild: true + dev: false + optional: true + + 
/rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + deprecated: Rimraf versions prior to v4 are no longer supported + hasBin: true + requiresBuild: true + dependencies: + glob: 7.2.3 + dev: false + optional: true + + /router@2.2.0: + resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} + engines: {node: '>= 18'} + dependencies: + debug: 4.4.0 + depd: 2.0.0 + is-promise: 4.0.0 + parseurl: 1.3.3 + path-to-regexp: 8.3.0 + transitivePeerDependencies: + - supports-color + dev: false + + /safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + dev: false + + /safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + dev: false + + /safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + requiresBuild: true + dev: false + + /scheduler@0.26.0: + resolution: {integrity: sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==} + dev: false + + /semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + dev: true + + /semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} + engines: {node: '>=10'} + hasBin: true + + /send@1.2.1: + resolution: {integrity: sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==} + engines: {node: '>= 18'} + dependencies: + debug: 4.4.3 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 2.0.0 + http-errors: 2.0.1 + mime-types: 3.0.2 + ms: 2.1.3 + on-finished: 2.4.1 + 
range-parser: 1.2.1 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + dev: false + + /serve-static@2.2.1: + resolution: {integrity: sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==} + engines: {node: '>= 18'} + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 1.2.1 + transitivePeerDependencies: + - supports-color + dev: false + + /set-blocking@2.0.0: + resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} + requiresBuild: true + dev: false + optional: true + + /set-cookie-parser@2.7.1: + resolution: {integrity: sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==} + dev: false + + /setprototypeof@1.2.0: + resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + dev: false + + /shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + dependencies: + shebang-regex: 3.0.0 + dev: true + + /shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + dev: true + + /side-channel-list@1.0.0: + resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + dev: false + + /side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + dev: false + + /side-channel-weakmap@1.0.2: + resolution: {integrity: 
sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + dev: false + + /side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + dev: false + + /sift@17.1.3: + resolution: {integrity: sha512-Rtlj66/b0ICeFzYTuNvX/EF1igRbbnGSvEyT79McoZa/DeGhMyC5pWKOEsZKnpkqtSeovd5FL/bjHWC3CIIvCQ==} + dev: false + + /signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + /signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + dev: true + + /simple-concat@1.0.1: + resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} + dev: false + + /simple-get@4.0.1: + resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==} + dependencies: + decompress-response: 6.0.0 + once: 1.4.0 + simple-concat: 1.0.1 + dev: false + + /slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + dev: true + + /smart-buffer@4.2.0: + resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} + engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + requiresBuild: true + dev: false + optional: true + + /socks-proxy-agent@6.2.1: + resolution: {integrity: 
sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==} + engines: {node: '>= 10'} + requiresBuild: true + dependencies: + agent-base: 6.0.2 + debug: 4.4.0 + socks: 2.8.7 + transitivePeerDependencies: + - supports-color + dev: false + optional: true + + /socks@2.8.7: + resolution: {integrity: sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==} + engines: {node: '>= 10.0.0', npm: '>= 3.0.0'} + requiresBuild: true + dependencies: + ip-address: 10.0.1 + smart-buffer: 4.2.0 + dev: false + optional: true + + /sort-by@0.0.2: + resolution: {integrity: sha512-iOX5oHA4a0eqTMFiWrHYqv924UeRKFBLhym7iwSVG37Egg2wApgZKAjyzM9WZjMwKv6+8Zi+nIaJ7FYsO9EkoA==} + dev: false + + /source-map-support@0.5.13: + resolution: {integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + dev: true + + /source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + dev: true + + /sparse-bitfield@3.0.3: + resolution: {integrity: sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==} + dependencies: + memory-pager: 1.5.0 + dev: false + + /sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + dev: true + + /sqlite3@5.1.7: + resolution: {integrity: sha512-GGIyOiFaG+TUra3JIfkI/zGP8yZYLPQ0pl1bH+ODjiX57sPhrLU5sQJn1y9bDKZUFYkX1crlrPfSYt0BKKdkog==} + requiresBuild: true + dependencies: + bindings: 1.5.0 + node-addon-api: 7.1.1 + prebuild-install: 7.1.3 + tar: 6.2.1 + optionalDependencies: + node-gyp: 8.4.1 + transitivePeerDependencies: + - bluebird + - supports-color + dev: false + + /ssri@8.0.1: + resolution: {integrity: 
sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==} + engines: {node: '>= 8'} + requiresBuild: true + dependencies: + minipass: 3.3.6 + dev: false + optional: true + + /stack-utils@2.0.6: + resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} + engines: {node: '>=10'} + dependencies: + escape-string-regexp: 2.0.0 + dev: true + + /state@0.2.0: + resolution: {integrity: sha512-USXyoYrQg6xwsUtKTp613UAebyRdc3hlQwMwrZvMim6tikbw7HsOcaovhGVu+4mmxJOejEiX9ya5j+Y5XhcCcw==} + dependencies: + omicron: 0.2.0 + dev: false + + /statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + dev: false + + /string-length@4.0.2: + resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} + engines: {node: '>=10'} + dependencies: + char-regex: 1.0.2 + strip-ansi: 6.0.1 + dev: true + + /string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + /string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.2 + dev: true + + /string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + requiresBuild: true + dependencies: + safe-buffer: 5.2.1 + dev: false + + /strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + dependencies: + 
ansi-regex: 5.0.1 + + /strip-ansi@7.1.2: + resolution: {integrity: sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==} + engines: {node: '>=12'} + dependencies: + ansi-regex: 6.2.2 + dev: true + + /strip-bom@4.0.0: + resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} + engines: {node: '>=8'} + dev: true + + /strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + dev: true + + /strip-json-comments@2.0.1: + resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + dev: false + + /strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + dev: true + + /supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + dependencies: + has-flag: 4.0.0 + dev: true + + /supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + dependencies: + has-flag: 4.0.0 + dev: true + + /synckit@0.11.11: + resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==} + engines: {node: ^14.18.0 || >=16.0.0} + dependencies: + '@pkgr/core': 0.2.9 + dev: true + + /tape@1.0.4: + resolution: {integrity: sha512-ZJJxfxdPJWgq45Un77uYlI0/R9AkW7/sByPSb915VED2MgNm5r2UwXQwvECabIrpZKiF3sc4VaeDNljLRgMRhw==} + hasBin: true + dependencies: + deep-equal: 0.0.0 + defined: 0.0.0 + jsonify: 0.0.1 + through: 2.3.8 + dev: false + + /tar-fs@2.1.4: + resolution: {integrity: 
sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==} + dependencies: + chownr: 1.1.4 + mkdirp-classic: 0.5.3 + pump: 3.0.3 + tar-stream: 2.2.0 + dev: false + + /tar-stream@2.2.0: + resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.5 + fs-constants: 1.0.0 + inherits: 2.0.4 + readable-stream: 3.6.2 + dev: false + + /tar@6.2.1: + resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} + engines: {node: '>=10'} + dependencies: + chownr: 2.0.0 + fs-minipass: 2.1.0 + minipass: 5.0.0 + minizlib: 2.1.2 + mkdirp: 1.0.4 + yallist: 4.0.0 + dev: false + + /test-exclude@6.0.0: + resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} + engines: {node: '>=8'} + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 7.2.3 + minimatch: 3.1.2 + dev: true + + /through@2.3.8: + resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} + dev: false + + /tmpl@1.0.5: + resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} + dev: true + + /to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + dependencies: + is-number: 7.0.0 + dev: true + + /toidentifier@1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + dev: false + + /tr46@5.1.1: + resolution: {integrity: sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==} + engines: {node: '>=18'} + dependencies: + punycode: 2.3.1 + dev: false + 
+ /ts-node@10.9.2(@types/node@24.7.0)(typescript@5.9.3): + resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.12 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.4 + '@types/node': 24.7.0 + acorn: 8.15.0 + acorn-walk: 8.3.4 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 5.9.3 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + dev: true + + /tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + requiresBuild: true + dev: true + optional: true + + /tunnel-agent@0.6.0: + resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} + dependencies: + safe-buffer: 5.2.1 + dev: false + + /type-detect@4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + dev: true + + /type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + dev: true + + /type-is@2.0.1: + resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} + engines: {node: '>= 0.6'} + dependencies: + content-type: 1.0.5 + media-typer: 1.1.0 + mime-types: 3.0.2 + dev: false + + /typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + dev: true + 
+ /uid-safe@2.1.5: + resolution: {integrity: sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==} + engines: {node: '>= 0.8'} + dependencies: + random-bytes: 1.0.0 + dev: false + + /uid2@0.0.4: + resolution: {integrity: sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA==} + dev: false + + /undici-types@7.14.0: + resolution: {integrity: sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==} + dev: true + + /unique-filename@1.1.1: + resolution: {integrity: sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==} + requiresBuild: true + dependencies: + unique-slug: 2.0.2 + dev: false + optional: true + + /unique-slug@2.0.2: + resolution: {integrity: sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==} + requiresBuild: true + dependencies: + imurmurhash: 0.1.4 + dev: false + optional: true + + /unpipe@1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + dev: false + + /unrs-resolver@1.11.1: + resolution: {integrity: sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==} + requiresBuild: true + dependencies: + napi-postinstall: 0.3.4 + optionalDependencies: + '@unrs/resolver-binding-android-arm-eabi': 1.11.1 + '@unrs/resolver-binding-android-arm64': 1.11.1 + '@unrs/resolver-binding-darwin-arm64': 1.11.1 + '@unrs/resolver-binding-darwin-x64': 1.11.1 + '@unrs/resolver-binding-freebsd-x64': 1.11.1 + '@unrs/resolver-binding-linux-arm-gnueabihf': 1.11.1 + '@unrs/resolver-binding-linux-arm-musleabihf': 1.11.1 + '@unrs/resolver-binding-linux-arm64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-arm64-musl': 1.11.1 + '@unrs/resolver-binding-linux-ppc64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-riscv64-gnu': 1.11.1 + 
'@unrs/resolver-binding-linux-riscv64-musl': 1.11.1 + '@unrs/resolver-binding-linux-s390x-gnu': 1.11.1 + '@unrs/resolver-binding-linux-x64-gnu': 1.11.1 + '@unrs/resolver-binding-linux-x64-musl': 1.11.1 + '@unrs/resolver-binding-wasm32-wasi': 1.11.1 + '@unrs/resolver-binding-win32-arm64-msvc': 1.11.1 + '@unrs/resolver-binding-win32-ia32-msvc': 1.11.1 + '@unrs/resolver-binding-win32-x64-msvc': 1.11.1 + dev: true + + /update-browserslist-db@1.1.3(browserslist@4.24.5): + resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + dependencies: + browserslist: 4.24.5 + escalade: 3.2.0 + picocolors: 1.1.1 + dev: true + + /util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + requiresBuild: true + dev: false + + /utils-merge@1.0.1: + resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} + engines: {node: '>= 0.4.0'} + dev: false + + /v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + dev: true + + /v8-to-istanbul@9.3.0: + resolution: {integrity: sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==} + engines: {node: '>=10.12.0'} + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + '@types/istanbul-lib-coverage': 2.0.6 + convert-source-map: 2.0.0 + dev: true + + /vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + dev: false + + /walker@1.0.8: + resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + dependencies: + makeerror: 1.0.12 + dev: true + + 
/webidl-conversions@7.0.0: + resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==} + engines: {node: '>=12'} + dev: false + + /whatwg-url@14.2.0: + resolution: {integrity: sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==} + engines: {node: '>=18'} + dependencies: + tr46: 5.1.1 + webidl-conversions: 7.0.0 + dev: false + + /which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + dependencies: + isexe: 2.0.0 + + /wide-align@1.1.5: + resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==} + requiresBuild: true + dependencies: + string-width: 4.2.3 + dev: false + optional: true + + /wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + dev: true + + /wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + dependencies: + ansi-styles: 6.2.3 + string-width: 5.1.2 + strip-ansi: 7.1.2 + dev: true + + /wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + + /write-file-atomic@5.0.1: + resolution: {integrity: sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + imurmurhash: 0.1.4 + signal-exit: 4.1.0 + dev: true + + /y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + dev: 
true + + /yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + dev: true + + /yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + dev: false + + /yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + dev: true + + /yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + dev: true + + /yn@3.1.1: + resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + dev: true + + /yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + dev: true + + /zustand@5.0.8(react@19.2.0): + resolution: {integrity: sha512-gyPKpIaxY9XcO2vSMrLbiER7QMAMGOQZVRdJ6Zi782jkbzZygq5GI9nG8g+sMgitRtndwaBSl7uiqC49o1SSiw==} + engines: {node: '>=12.20.0'} + peerDependencies: + '@types/react': '>=18.0.0' + immer: '>=9.0.6' + react: '>=18.0.0' + use-sync-external-store: '>=1.2.0' + peerDependenciesMeta: + '@types/react': + optional: true + immer: + optional: true + react: + optional: true + use-sync-external-store: + optional: true + dependencies: + react: 19.2.0 + dev: false diff --git a/routes/auth.js b/routes/auth.js new file mode 100644 index 00000000..d927fc25 --- /dev/null +++ b/routes/auth.js @@ -0,0 +1,5 @@ + +var express = require('express'); +var router = express.Router(); + +module.exports = router; diff --git a/routes/index.js 
b/routes/index.js new file mode 100644 index 00000000..83aa5210 --- /dev/null +++ b/routes/index.js @@ -0,0 +1,10 @@ + +var express = require('express'); +var router = express.Router(); + +/* GET home page. */ +router.get('/', function(req, res, next) { + res.render('index', { title: 'Express' }); +}); + +module.exports = router; diff --git a/src/$PROFILE b/src/$PROFILE new file mode 100644 index 00000000..f7069873 --- /dev/null +++ b/src/$PROFILE @@ -0,0 +1,6 @@ +echo "function yarn { corepack yarn `$args }" >> $PROFILE +echo "function yarnpkg { corepack yarnpkg `$args }" >> $PROFILE +echo "function pnpm { corepack pnpm `$args }" >> $PROFILE +echo "function pnpx { corepack pnpx `$args }" >> $PROFILE +echo "function npm { corepack npm `$args }" >> $PROFILE +echo "function npx { corepack npx `$args }" >> $PROFILE \ No newline at end of file diff --git a/src/About.js b/src/About.js new file mode 100644 index 00000000..1386da55 --- /dev/null +++ b/src/About.js @@ -0,0 +1,11 @@ +import React from 'react-router-dom' + +export const About = () => { + return ( +
+

About Movie Company

+ +

...

+
+ ) +} \ No newline at end of file diff --git a/src/About.jsx b/src/About.jsx new file mode 100644 index 00000000..930b111d --- /dev/null +++ b/src/About.jsx @@ -0,0 +1,13 @@ +import React from 'react'; + +const About = () => { + return ( +
+

About MovieFlix

+

MovieFlix is a simple web application that allows you to browse popular movies. It is built with React and uses the TMDB API to fetch movie data.

+

This project was created to demonstrate the use of React Router and to practice building a responsive and accessible web application.

+
+ ); +} + +export default About; diff --git a/src/App.js b/src/App.js new file mode 100644 index 00000000..5c3f5128 --- /dev/null +++ b/src/App.js @@ -0,0 +1,38 @@ +import React from 'react-router-dom' +import { BrowserRouter, Routes, Route } from 'react-router-dom' +import { Welcome } from './Welcome' +import { About } from './About' +import { Contact } from './Contact' +import { Nav } from './Nav' +import { MoviesList } from 'pages/MoviesList' +import { ShowMovies } from 'pages/ShowMovies' +import './App.css'; +import SideBar from ".SideBar"; +import +function App () { + return ( +
+ Hallo custumer/member +

+
+ + ); +} + +export const App = () => { + return ( +
+ +
+ ) +} +export default App; \ No newline at end of file diff --git a/src/App.jsx b/src/App.jsx index cb7cdee1..5015e6bf 100644 --- a/src/App.jsx +++ b/src/App.jsx @@ -1,5 +1,27 @@ + +import { BrowserRouter, Routes, Route } from "react-router-dom" +import Header from './components/Header.jsx' +import Movies from './Movies.jsx' +import About from './About.jsx' +import MovieDetails from './components/pages/MovieDetails.jsx' +import NotFound from './components/pages/NotFound.jsx' + +// The Header component will show in all "pages" It is outside of the routing scope. + export const App = () => { return ( -

Movies

+ <> +
+ + + + } /> + } /> + } /> + } /> + + + + ) -} +} \ No newline at end of file diff --git a/src/Contact.js b/src/Contact.js new file mode 100644 index 00000000..1d657d9e --- /dev/null +++ b/src/Contact.js @@ -0,0 +1,11 @@ +import React from 'react' + +export const Contact = () => { + return ( +
+

Contact us

+ +

...

+
+ ); +} \ No newline at end of file diff --git a/src/Movies.jsx b/src/Movies.jsx new file mode 100644 index 00000000..e23cba80 --- /dev/null +++ b/src/Movies.jsx @@ -0,0 +1,53 @@ +import React from 'react' + +// this is the "main" page. When you get to the website you should get to this page. +// It will show the movies that we fetch from the API +// Removed unused variable 'apikey' +import { useEffect, useState } from 'react' +import Card from './components/Card.jsx' +import { Link } from 'react-router-dom' + +const Movies = () => { + + const [movies, setMovies] = useState([]) + const [loading, setLoading] = useState(true) + // Do not expose the API key but add it to an env file instead. + const apiKey = import.meta.env.VITE_TMDB_API_KEY + // Fetch the data in the useEffect hook with empty dependency array [] + + // https://api.themoviedb.org/3/movie/popular?api_key={api_key}&language=en-US&page=1 + + // Do proper error handling, and use asyns/await or .then() + useEffect(() => { + fetch(`https://api.themoviedb.org/3/movie/popular?api_key=${apiKey}&language=en-US&page=1`) + .then((response) => response.json()) + .then((data) => { + setMovies(data.results) // save the movies to the state varable + setLoading(false) + }) + .catch((error) => { + console.error('Error fetching movies:', error) + setLoading(false) + }) + + }, [apiKey]); + + return ( + <> +

Popular Movies

+ {loading ? ( +

Loading movies...

+ ) : ( +
+ {movies.map(movie => ( + + + + ))} +
+ )} + + ); +} + +export default Movies \ No newline at end of file diff --git a/src/Nav.js b/src/Nav.js new file mode 100644 index 00000000..fd018c8f --- /dev/null +++ b/src/Nav.js @@ -0,0 +1,19 @@ +import React from 'react' +import { Link, NavLink } from 'react-router-dom' +export const Nav = () => { + return ( + + ); +} \ No newline at end of file diff --git a/src/ShowMovies.js b/src/ShowMovies.js new file mode 100644 index 00000000..0727ee32 --- /dev/null +++ b/src/ShowMovies.js @@ -0,0 +1,22 @@ +import React from 'react' +import { useParams } from 'react-router-dom' +import { Movies } from 'components/data/data/Movies' +import movies from 'components/data/data/movies.json' +export const ShowMovies = () => { + const params = useParams() + const moviesMatch = movies.find((movie) => movie.slug === params.slug) + console.log(params) + console.log(moviesMatch) + return ( +
+ +
+ +
+ +
+
+
Show movies page
+
+ ) +} \ No newline at end of file diff --git a/src/SideBar.js b/src/SideBar.js new file mode 100644 index 00000000..6d50b0b1 --- /dev/null +++ b/src/SideBar.js @@ -0,0 +1,37 @@ +import { BsPlus, BsFillLIghtningFill, BsGearFill } from react-icons/fa'; +import { FaFire, FaPoo} from 'react-icons/fa'; +const SideBar = () => { + return { +
+ flex flex-col"> + bg-gray-900 text-white shadow-lg"> + A + B + C + + E + +}} + +}/> +
+ }; +}; +const SideBarIcon = ({icon,text='tooltip'})=> (); +
+{icon} + + {text} + +
+}; + +constSideBar =()=> { + return( +
+ ) +} +export default SideBar \ No newline at end of file diff --git a/src/Welcome.jsx b/src/Welcome.jsx new file mode 100644 index 00000000..56d0d7dc --- /dev/null +++ b/src/Welcome.jsx @@ -0,0 +1,10 @@ +import React from 'react' + +export const Welcome = () => { + return ( +
+

Welcome to Movie Company

+

...

+
+ ) +} \ No newline at end of file diff --git a/src/components/Card.css b/src/components/Card.css new file mode 100644 index 00000000..09f03272 --- /dev/null +++ b/src/components/Card.css @@ -0,0 +1,9 @@ +.card { + width: 30%; + height: 100px; + border: 2px solid; + margin: 2rem; + background-color: bisque; + padding: 1rem; + cursor: pointer; + } \ No newline at end of file diff --git a/src/components/Card.jsx b/src/components/Card.jsx new file mode 100644 index 00000000..e86f6054 --- /dev/null +++ b/src/components/Card.jsx @@ -0,0 +1,19 @@ +import React from 'react'; +import { Link } from 'react-router-dom'; + +const Card = ({ movie }) => { + const imageUrl = `https://image.tmdb.org/t/p/w500${movie.poster_path}`; + + return ( +
+ + {movie.title} +
+

{movie.title}

+
+ +
+ ); +}; + +export default Card; diff --git a/src/components/Header.css b/src/components/Header.css new file mode 100644 index 00000000..92024cf8 --- /dev/null +++ b/src/components/Header.css @@ -0,0 +1,5 @@ +header { + height: 8rem; + background: black; + color: white; + } \ No newline at end of file diff --git a/src/components/Header.jsx b/src/components/Header.jsx new file mode 100644 index 00000000..2f6ab0e9 --- /dev/null +++ b/src/components/Header.jsx @@ -0,0 +1,18 @@ +import React from 'react'; +import { Link } from 'react-router-dom'; + +const Header = () => { + return ( +
+
+ MovieFlix + +
+
+ ); +}; + +export default Header; diff --git a/src/components/Movies.js b/src/components/Movies.js new file mode 100644 index 00000000..c4655b54 --- /dev/null +++ b/src/components/Movies.js @@ -0,0 +1,22 @@ +import React from 'react-router-dom' +import { Movies } from 'components/Movies' +import movies from 'components/data/data/movies.json' + +export const Movie = ({ name, image }) => ( +
+ +

{name}

+
+) + +export const MoviesList = () => { + return ( +
+ {movies.map((movie) => ( + + + + ))} +
+ ); +} \ No newline at end of file diff --git a/src/components/MoviesList.js b/src/components/MoviesList.js new file mode 100644 index 00000000..1c17cd47 --- /dev/null +++ b/src/components/MoviesList.js @@ -0,0 +1,16 @@ +import React from 'react' +import { Link } from 'react-router-dom' +import { Movies } from 'components/data/data/Movies' +import movies from 'data/data/movies.json' + +export const MoviesList = () => { + return ( +
+ {movies.map((movies) => ( + + + + ))} +
+ ) +} \ No newline at end of file diff --git a/src/components/data/data/Movies.jsx b/src/components/data/data/Movies.jsx new file mode 100644 index 00000000..c3e613b5 --- /dev/null +++ b/src/components/data/data/Movies.jsx @@ -0,0 +1,10 @@ +useEffect (() => { + https://api.themovied.org/3/movie/popular + .then((response) => response.json()) + .then((data) => set Movies(data.results) +.catch ((error => console.error('Error fetching movies', error)); + +)[]} +return +
Movies
+} \ No newline at end of file diff --git a/src/components/movies.json b/src/components/movies.json new file mode 100644 index 00000000..742fbbc0 --- /dev/null +++ b/src/components/movies.json @@ -0,0 +1,22 @@ +{ + "poster_path": "/IfB9hy4JH1eH6HEfIgIGORXi5h.jpg", + "adult": false, + "overview": "Jack Reacher must uncover the truth behind a major government conspiracy in order to clear his name. On the run as a fugitive from the law, Reacher uncovers a potential secret from his past that could change his life forever.", + "release_date": "2016-10-19", + "genre_ids": [ + 53, + 28, + 80, + 18, + 9648 + ], + "id": 343611, + "original_title": "Jack Reacher: Never Go Back", + "original_language": "en", + "title": "Jack Reacher: Never Go Back", + "backdrop_path": "/4ynQYtSEuU5hyipcGkfD6ncwtwz.jpg", + "popularity": 26.818468, + "vote_count": 201, + "video": false, + "vote_average": 4.19 +} \ No newline at end of file diff --git a/src/components/pages/MovieDetails.jsx b/src/components/pages/MovieDetails.jsx new file mode 100644 index 00000000..4ce1ef0a --- /dev/null +++ b/src/components/pages/MovieDetails.jsx @@ -0,0 +1,49 @@ +import { useEffect, useState } from 'react'; +import { Link, useParams } from 'react-router-dom'; + +const MovieDetails = () => { + const { movieId } = useParams(); + const [movieDetails, setMovieDetails] = useState(null); + const [loading, setLoading] = useState(true); + const apiKey = import.meta.env.VITE_TMDB_API_KEY; + + useEffect(() => { + fetch(`https://api.themoviedb.org/3/movie/${movieId}?api_key=${apiKey}&language=en-US`) + .then((response) => response.json()) + .then((data) => { + setMovieDetails(data); + setLoading(false); + }) + .catch((error) => { + console.error('Error fetching movie details:', error); + setLoading(false); + }); + }, [apiKey, movieId]); + + if (loading) { + return

Loading movie details...

; + } + + if (!movieDetails) { + return

Movie not found.

; + } + + const imageUrl = `https://image.tmdb.org/t/p/w500${movieDetails.poster_path}`; + + return ( +
+ Back to Movies +
+ {movieDetails.title} +
+

{movieDetails.title}

+

{movieDetails.overview}

+

Release Date: {movieDetails.release_date}

+

Rating: {movieDetails.vote_average}

+
+
+
+ ); +}; + +export default MovieDetails; diff --git a/src/components/pages/NotFound.jsx b/src/components/pages/NotFound.jsx new file mode 100644 index 00000000..2524d2d4 --- /dev/null +++ b/src/components/pages/NotFound.jsx @@ -0,0 +1,14 @@ +import React from 'react'; +import { Link } from 'react-router-dom'; + +const NotFound = () => { + return ( +
+

404 - Not Found

+

The page you are looking for does not exist.

+ Go to Home +
+ ); +} + +export default NotFound; diff --git a/src/components/pages/PopularList.jsx b/src/components/pages/PopularList.jsx new file mode 100644 index 00000000..76320bbf --- /dev/null +++ b/src/components/pages/PopularList.jsx @@ -0,0 +1,6 @@ +import React from 'react'; +import { Link } from 'react-router-dom'; +import { Movies} from '../../api/movies'; +// import movies from '../../data/data/movies.json'; + +const API_URL = "https://api.themoviedb.org/3/movie/popular?api_key=fa4e953ba904f2b280d0a4c5b6beb8ba&language=US&page="; \ No newline at end of file diff --git a/src/components/tests/cleanup.sh b/src/components/tests/cleanup.sh new file mode 100644 index 00000000..f7ac5fc3 --- /dev/null +++ b/src/components/tests/cleanup.sh @@ -0,0 +1,3 @@ +#!/bin/sh +rm -f *.{ER,OU} +rm -f hopper_w_*_*.pbs \ No newline at end of file diff --git a/src/components/tests/client.py b/src/components/tests/client.py new file mode 100644 index 00000000..b1a75d47 --- /dev/null +++ b/src/components/tests/client.py @@ -0,0 +1,29 @@ +import socket + + + +# create an ipv4 (AF_INET) socket object using the tcp protocol (SOCK_STREAM) +client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + +# connect the client +# client.connect((target, port)) +client.connect(('127.0.0.1', 1233)) +response = client.recv(2048) +# Input UserName +name = input(response.decode()) +client.send(str.encode(name)) +response = client.recv(2048) +# Input Password +password = input(response.decode()) +client.send(str.encode(password)) +''' Response : Status of Connection : + 1 : Registeration successful + 2 : Connection Successful + 3 : Login Failed +''' +# Receive response +response = client.recv(2048) +response = response.decode() + +print(response) +client.close() \ No newline at end of file diff --git a/src/components/tests/hopper_w.pbs b/src/components/tests/hopper_w.pbs new file mode 100644 index 00000000..25867caa --- /dev/null +++ b/src/components/tests/hopper_w.pbs @@ -0,0 +1,23 @@ +#!/bin/bash +#PBS 
-q debug +#PBS -N mongostress +#PBS -l mppwidth=48 +#PBS -l walltime=00:10:00 +#PBS -o $PBS_O_WORKDIR/job.out +#PBS -e $PBS_O_WORKDIR/job.error + +module load python + +export CRAY_ROOTFS=DSL + +cd $PBS_O_WORKDIR + +## Pick a time 3 minutes into the future +#future=`python -c "import time; print(int(time.time()) + 180)"` +# or, for debugging, 10 seconds into the future +future=`python -c "import time; print(int(time.time()) + 10)"` + +docs=10 +clear_flag="-c" + +aprun -a xt -n 48 ./w.py $clear_flag --server=128.55.57.13 --ndocs=$docs --when=$future \ No newline at end of file diff --git a/src/components/tests/hopper_w.stache b/src/components/tests/hopper_w.stache new file mode 100644 index 00000000..5a450fdf --- /dev/null +++ b/src/components/tests/hopper_w.stache @@ -0,0 +1,26 @@ +#!/bin/bash +#PBS -q debug +#PBS -N mongostress_{{run}} +#PBS -l mppwidth={{procs}} +#PBS -l walltime=00:30:00 + +module load python + +export CRAY_ROOTFS=DSL + +cd $PBS_O_WORKDIR + +## Pick a time 3 minutes into the future +future=`python -c "import time; print(int(time.time()) + 180)"` +# or, for debugging, 10 seconds into the future +#future=`python -c "import time; print(int(time.time()) + 10)"` + +{{#clear}} +clear_flag="-c" +{{/clear}} +{{^clear}} +clear_flag="" +{{/clear}} + +aprun -a xt -n {{procs}} ./w.py $clear_flag --server={{server}} --ndocs={{docs}} --when=$future \ + -R hopper_{{run}} \ No newline at end of file diff --git a/src/components/tests/mpi_run.pbs b/src/components/tests/mpi_run.pbs new file mode 100644 index 00000000..c8e4c180 --- /dev/null +++ b/src/components/tests/mpi_run.pbs @@ -0,0 +1,18 @@ +#!/bin/bash + +#PBS -q debug +#PBS -N test_mpi4py +#PBS -l mppwidth=216 +## Number of processes on each node +##PBS -l mppnppn=1 +#PBS -l walltime=00:10:00 +#PBS -o $PBS_O_WORKDIR/job.out +#PBS -e $PBS_O_WORKDIR/job.error + +module load python/2.7.1 + +cd $PBS_O_WORKDIR +# run 216 tasks +aprun -n 216 python ./mpi_stress_test.py --host='X.X.X.X' --port=27018 --nclients=216 
+# run 4 tasks with 1 task (MPI process) per node +#aprun -n 4 -N 1 python ./test_01.py \ No newline at end of file diff --git a/src/components/tests/mpi_stress_test.py b/src/components/tests/mpi_stress_test.py new file mode 100644 index 00000000..699e8ddc --- /dev/null +++ b/src/components/tests/mpi_stress_test.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +" Mongo Stress Test using MPI" +import sys +import time +import datetime +import logging +import socket +from optparse import OptionParser + +import pymongo +from mpi4py import MPI + +DB_NAME = 'db_flex' +COLLECTION_NAME = 'flexers' + +log = logging.getLogger(__name__) + +comm = MPI.COMM_WORLD +rank = comm.Get_rank() +size = comm.Get_size() + +log.info("MPI size {n}".format(n=size)) + + +def mongo_inserter(cid, ndocs, host, port): + """ + cid (int) Client id + ndoc (int) Number of docs to import + host (str) mongo hostname + port (int) mongo port to connect to + """ + s = socket.getfqdn() + log.info("start client {i} on node {n} connecting to {h}:{p} at {d}".format(i=cid, n=s, h=host, p=port, d=datetime.datetime.now())) + conn = pymongo.Connection(host, port) + db = conn[DB_NAME] + collection = db[COLLECTION_NAME] + t0 = time.time() + for n in range(ndocs): + message = "updating mongodb with value {v} from client {i}".format(v=n, i=cid) + log.info(message) + doc = {'doc_id': n, 'created_at': datetime.datetime.now(), 'message': message, 'client_id': cid} + collection.insert(doc) + + tf = time.time() + dt = tf - t0 + ops = ndocs / dt + m = "Client {i} on node {s} took {dt} with op/s {o:2f}".format(i=cid, s=s, dt=dt, o=ops) + print(m) + log.info(m) + log.info("Client {i} completed".format(i=cid)) + + +def main(nclients, ndocs, host, port): + sname = socket.getfqdn() + if rank == 0: + #data could be the db config params + #comm.send(config, tag=11) + print("starting up rank {r} on {s}".format(r=rank, s=sname)) + else: + print("starting up rank {r} on {s}".format(r=rank, s=sname)) + #data = comm.recv(source=0, tag=11) 
+ mongo_inserter(rank, ndocs, host, port) + sys.exit(0) + return 0 + + +if __name__ == '__main__': + parser = OptionParser() + # nclients isn't necessary now. MPI determines the number from PBS script + parser.add_option('-n', '--nclients', type='int', dest='nclients', help='Number of clients to start up') + parser.add_option('-d', '--ndocs', dest='ndocs', type='int', default=1000, help='number of docs to import per client into the db') + parser.add_option('-H', '--host', dest='host', help='db hostname') + parser.add_option('-p', '--port', dest='port', type='int', default=27017, help='db port to connect to') + (options, args) = parser.parse_args() + # nclients, host must be given! + if options.nclients is not None and options.host is not None: + sys.exit(main(options.nclients, options.ndocs, options.host, options.port)) + else: + print("Provide --nclients and --host options") + sys.exit(0) \ No newline at end of file diff --git a/src/components/tests/pbstache.py b/src/components/tests/pbstache.py new file mode 100644 index 00000000..41dec0ce --- /dev/null +++ b/src/components/tests/pbstache.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +"""Generate PBS run scripts. + +Usage: pbstache.py .stache from:to:by + Output is __.pbs, + where is number of clients and + is the total number of documents. 
+""" +import pystache +import sys + +def usage(): + print(__doc__) + sys.exit(1) + +if len(sys.argv) < 2 or sys.argv[1] == "-h" or len(sys.argv) != 3: + usage() + +infile = sys.argv[1] +if not infile.endswith(".stache"): + print("Input file format is .stache") + usage() +try: + data = file(infile).read() +except IOError, err: + print("Cannot read file {f}".format(f=infile)) + usage() +base_infile = infile[:-7] + +try: + p_from, p_to, p_step = map(int, sys.argv[2].split(':')) +except ValueError: + print("Client count range must be 3 numbers a:b:c") + usage() + +total_docs = 1000000 +v = {'clear':True, 'server':"128.55.57.13"} + +for procs in xrange(p_from, p_to+1, p_step): + v['docs'] = total_docs / procs + v['procs'] = procs + v['run'] = "{p:d}_{t:d}".format(p=procs, t=total_docs) + script = pystache.render(data, v) + ofile = "{f}_{run}.pbs".format(f=base_infile, **v) + file(ofile, "w").write(script) + print("wrote {f}".format(f=ofile)) \ No newline at end of file diff --git a/src/components/tests/server.py b/src/components/tests/server.py new file mode 100644 index 00000000..05822408 --- /dev/null +++ b/src/components/tests/server.py @@ -0,0 +1,63 @@ +import socket +import os +import threading +import hashlib + + +# Create Socket (TCP) Connection +ServerSocket = socket.socket(family = socket.AF_INET, type = socket.SOCK_STREAM) +host = '127.0.0.1' +port = 1233 +ThreadCount = 0 +try: + ServerSocket.bind((host, port)) +except socket.error as e: + print(str(e)) + +print('Waitiing for a Connection..') +ServerSocket.listen(5) +HashTable = {} + +# Function : For each client +def threaded_client(connection): + connection.send(str.encode('ENTER USERNAME : ')) # Request Username + name = connection.recv(2048) + connection.send(str.encode('ENTER PASSWORD : ')) # Request Password + password = connection.recv(2048) + password = password.decode() + name = name.decode() + password=hashlib.sha256(str.encode(password)).hexdigest() # Password hash using SHA256 +# REGISTERATION 
PHASE +# If new user, regiter in Hashtable Dictionary + if name not in HashTable: + HashTable[name]=password + connection.send(str.encode('Registeration Successful')) + print('Registered : ',name) + print("{:<8} {:<20}".format('USER','PASSWORD')) + for k, v in HashTable.items(): + label, num = k,v + print("{:<8} {:<20}".format(label, num)) + print("-------------------------------------------") + + else: +# If already existing user, check if the entered password is correct + if(HashTable[name] == password): + connection.send(str.encode('Connection Successful')) # Response Code for Connected Client + print('Connected : ',name) + else: + connection.send(str.encode('Login Failed')) # Response code for login failed + print('Connection denied : ',name) + while True: + break + connection.close() + +while True: + Client, address = ServerSocket.accept() + client_handler = threading.Thread( + target=threaded_client, + args=(Client,) + ) + client_handler.start() + ThreadCount += 1 + print('Connection Request: ' + str(ThreadCount)) +ServerSocket.close() \ No newline at end of file diff --git a/src/components/tests/sharded-mongo b/src/components/tests/sharded-mongo new file mode 100644 index 00000000..2385fd6a --- /dev/null +++ b/src/components/tests/sharded-mongo @@ -0,0 +1,66 @@ +#!/bin/sh + +mode=$1 +shift + +function usage() { + e=$1 + if [ "x$e" != "x" ]; then + printf "** $e\n\n" + fi + p=$(basename $0) + cat - < /tmp/sharda.log & + mongod --shardsvr --dbpath /data/db/b --port 10001 > /tmp/shardb.log & + mongod --configsvr --dbpath /data/db/config --port 20000 > /tmp/configdb.log & + sleep 2 + mongos --chunkSize 1 --port 27017 --configdb 127.0.0.1:20000 > /tmp/mongos1.log & + mongos --chunkSize 1 --port 27018 --configdb 127.0.0.1:20000 > /tmp/mongos2.log & + ;; + + c*) + printf "== Configure sharding ==\n" + shardf=/tmp/configshard.js + cat - >$shardf < 0: + wait_sleep = int(max(0,wait_for_it - t0)) + log.debug("client.wait.start sec={0:d}".format(wait_sleep)) + 
time.sleep(wait_sleep) + log.debug("client.wait.end sec={0:d}".format(wait_sleep)) + message = "I am legend" + log.debug("loop.start n={n}".format(n=ndocs)) + t0 = time.time() + for i in xrange(ndocs): + doc = {'doc_num': i, 'message': message} + for collection in collections: + collection.insert(doc) + if pause > 0: + time.sleep(pause) + if tracing: + trace(log, "inserted {0:d}".format(i)) + dur = time.time() - t0 + log.debug("loop.end n={n} dur={d:f}".format(n=ndocs, d=dur)) + return dur + +def report(conn, run, delta_time, **kw): + """Report time for a client. + """ + doc = {"host":my_hostname, "pid":my_pid, "run":run, "dt":delta_time} + doc.update(kw) + log.debug("reporting {}".format(doc)) + coll = conn[REPORT_DB][REPORT_COLL] + coll.insert(doc) + +def print_results(coll): + hdr = None + for rec in coll.find(): + if not hdr: + hdr = filter(lambda x: x[0] != "_", rec.keys()) + print(",".join(hdr)) + values = [str(rec.get(key, "")) for key in hdr] + print(",".join(values)) + +def main(): + """Program entry point. 
+ """ + global wait_for_it + + # command-line + parser = OptionParser() + parser.add_option('-c', '--clear', dest='do_clear', help='Clear collection first', + action="store_true", default=False) + parser.add_option('-d', '--ndocs', dest='ndocs', metavar="NUM", type='int', default= 1000, + help='Insert NUM docs per client (default=%default)') + parser.add_option('-m', '--mongoose', dest='mongoose', action='store_true', + help="Use sleepy mongoose REST api instead of mongodb native protocol") + parser.add_option('-n', '--nclients', dest='nclients', metavar="NUM", type='int', default=1, + help="Number of clients to connect to server (default=%default)") + parser.add_option('-p', '--port', dest='port', type='int', default=27018, + help='Connect to MongoDB server on PORT (default=%default)') + parser.add_option('-P', '--pause', dest='pause', type='int', default=0, + help="Pause MS milliseconds between each write (default=%default)", + metavar="MS") + parser.add_option('-q', '--quiet', dest='quiet', action='store_true', help="No logging") + parser.add_option('-r', '--results', dest='do_check', help="Print results and exit. 
" + "With -c/--clear also clears results.", + action="store_true") + parser.add_option("-R", "--run", dest="runid", metavar="ID", help="Run identifier", default=None) + parser.add_option('-s', '--server', dest='host', help='MongoDB server host (required)') + parser.add_option('-v', '--verbose', dest='vb', action='count', help="More logging") + parser.add_option('-w', '--when', dest='when', type='int', default=0, + help="Start at future time SEC seconds since 1/1/1970 (default=now)", + metavar="SEC") + (options, args) = parser.parse_args() + if options.host is None: + parser.error("-s/--server is required") + return 1 + + # set run identifier string + if options.runid is None: + if options.when > 0: + tm = options.when + else: + tm = int(time.time()) + run_id = "{0:d}".format(tm) + else: + run_id = options.runid + + # init logging + hdlr = logging.StreamHandler() + formatter = logging.Formatter("{r} %(asctime)s {h} {p:d} " + "%(levelname)s %(message)s".format(r=run_id, h=my_hostname, p=my_pid)) + hdlr.setFormatter(formatter) + log.addHandler(hdlr) + if options.quiet: + log.setLevel(logging.ERROR) + elif options.vb > 1: + log.setLevel(TRACE) + elif options.vb == 1: + log.setLevel(logging.DEBUG) + else: + if options.do_check: + log.setLevel(logging.WARN) + else: + log.setLevel(logging.INFO) + + # get vars from options + db_name, coll_name = STRESS_DB, STRESS_COLL + ndocs, host, port, wait_for_it, ncli = (options.ndocs, options.host, + options.port, options.when, options.nclients) + pause = options.pause / 1000.0 + + # start + log.info("run.start docs={m} clients={c} server={h}:{p:d} " + "db={db} collection={coll}".format( + r=run_id, m=ndocs, h=host, p=port, db=db_name, + coll=coll_name, c=ncli)) + + log.debug("pre.start") + # connect (once for each client) + if options.do_check: + ncli = 1 + if options.mongoose: + # Assume sleepymongoose is listening on port 27080 + # and connecting via localhost to the mongodb server. 
+ if mongate is None: + parser.error("-m/--mongoose option requires 'mongate' Python " + "module, which was not found.") + return 2 + from mongate import connection + try: + connections = [connection.Connection(host, 27080) + for _ in range(ncli)] + except socket.error as err: + log.critical("mongoose.connection.error msg={}".format(err)) + return -1 + for conn in connections: + conn.connect_to_mongo("mongodb://localhost", port) + else: + connections = [pymongo.Connection(host, port) + for _ in xrange(ncli)] + # check mode, just print results and stop + conn = connections[0] + if options.do_check: + coll = conn[db_name][REPORT_COLL] + print_results(coll) + if options.do_clear: + conn[db_name][REPORT_COLL].remove() + log.debug("pre.end status=0") + return 0 + # with clear flag, empty db first + if options.do_clear: + conn[db_name][coll_name].remove() + log.debug("pre.end status=0") + + log.debug("main.start") + dur = stress_test(connections, ndocs=ndocs, db_name=db_name, + coll_name=coll_name, pause=pause) + log.debug("main.end status=0") + + log.debug("post.start") + # re-use a previous connection + report(conn, run_id, dur, docs=ndocs, clients=ncli) + log.debug("post.end status=0") + + # done + log.info("run.end status=0") + return 0 + +if __name__ == '__main__': + sys.exit(main()) \ No newline at end of file diff --git a/src/components/tests/write_pbs.py b/src/components/tests/write_pbs.py new file mode 100644 index 00000000..6a0fbf78 --- /dev/null +++ b/src/components/tests/write_pbs.py @@ -0,0 +1,57 @@ +import sys +import os +from optparse import OptionParser + + +name = 'stress_test.py' +EXE = os.path.join(os.path.abspath(os.path.dirname(__file__)), name) + + +def write_file(file_name, nclients, ndocs, host, port, walltime): + """ + ncores (int) to use + file_name (str) to write pbs to + walltime (int) walltime (in min) used + """ + outs = [] + outs.append("#!/bin/bash") + outs.append("") + outs.append("#PBS -q debug") + outs.append("#PBS -N test") + # add one 
for the driving process? + outs.append("#PBS -l mppwidth={n}".format(n=nclients + 1)) + outs.append("#PBS -l walltime=00:0{i}:00".format(i=walltime)) + outs.append("#PBS -o {f}/RUN.out".format(f=os.getcwd())) + outs.append("#PBS -e {f}/RUN.error".format(f=os.getcwd())) + outs.append("") + outs.append("module load python/2.7.1") + outs.append("") + outs.append("cd {d}".format(d=os.getcwd())) + outs.append("aprun -n {n} python {e} --nclient {n} --port {p} --ndocs {ndocs} --host {h}".format(e=EXE, n=nclients, h=host, p=port, ndocs=ndocs)) + outs.append("") + + with open(file_name, 'w+') as f: + f.write("\n".join(outs)) + + +def main(output_file, nclients, ndocs, host, port, walltime): + write_file(output_file, nclients, ndocs, host, port, walltime) + return 0 + +if __name__ == '__main__': + parser = OptionParser() + parser.add_option('-n', '--nclients', type='int', dest='nclients', help='Number of clients to start up') + parser.add_option('-d', '--ndocs', dest='ndocs', type='int', default= 1000, help='number of docs to import per client into the db') + parser.add_option('-H', '--host', dest='host', help='db hostname') + parser.add_option('-o', '--out', dest='output_file', default='run.pbs', help='Output file to write to (e.g, "my_file.pbs"') + parser.add_option('-p', '--port', dest='port', type='int', default=27017, help='db port to connect to') + # 30 min is the max walltime allowed in the Hopper Debug queue + parser.add_option('-t', '--walltime', dest='walltime', type='int', default=29, help='pbs walltime') + (options, args) = parser.parse_args() + + # nclients, host must be given! 
+ if options.nclients is not None and options.host is not None: + sys.exit(main(options.output_file, options.nclients, options.ndocs, options.host, options.port, options.walltime)) + else: + print "Provide --nclients and --host options" + sys.exit(0) \ No newline at end of file diff --git a/src/craco.config.js b/src/craco.config.js new file mode 100644 index 00000000..880ea085 --- /dev/null +++ b/src/craco.config.js @@ -0,0 +1,11 @@ +module.exports = { + style: { + postcss: { + plugins: [ + require('tailwindcss'), + require('autoprefixer'), + ], + + }, + }, +} \ No newline at end of file diff --git a/src/edit.jsx b/src/edit.jsx new file mode 100644 index 00000000..02ad4608 --- /dev/null +++ b/src/edit.jsx @@ -0,0 +1,115 @@ +import { Form, useLoaderData } from "react-router-dom"; + +export default function EditContact() { + const { contact } = useLoaderData(); + + return ( +
+

+ Name + + +

+ + +