diff --git a/.autorestic.yml.old b/.autorestic.yml.old new file mode 100644 index 0000000000..ae86b33cdc --- /dev/null +++ b/.autorestic.yml.old @@ -0,0 +1,21 @@ + +version: 2 +locations: + home: + from: / + to: local + options: + backup: + exclude: + - /dev + - /proc + - /sys + - /tmp + - /run + - /mnt + - /media + - /lost+found +backends: + local: + type: local + path: /tmp/autorestic-test diff --git a/.bash_aliases b/.bash_aliases new file mode 100644 index 0000000000..e562182892 --- /dev/null +++ b/.bash_aliases @@ -0,0 +1,6 @@ +alias yarn="corepack yarn" +alias yarnpkg="corepack yarnpkg" +alias pnpm="corepack pnpm" +alias pnpx="corepack pnpx" +alias npm="corepack npm" +alias npx="corepack npx" \ No newline at end of file diff --git a/.codesandbox/node/assert.js b/.codesandbox/node/assert.js new file mode 100644 index 0000000000..2ddc44d9b8 --- /dev/null +++ b/.codesandbox/node/assert.js @@ -0,0 +1,824 @@ +'use strict'; + +const { + ArrayPrototypeIndexOf, + ArrayPrototypeJoin, + ArrayPrototypePush, + ArrayPrototypeSlice, + Error, + NumberIsNaN, + ObjectAssign, + ObjectIs, + ObjectKeys, + ObjectPrototypeIsPrototypeOf, + ReflectApply, + RegExpPrototypeExec, + String, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeSplit, +} = primordials; + +const { + codes: { + ERR_AMBIGUOUS_ARGUMENT, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_RETURN_VALUE, + ERR_MISSING_ARGS, + }, +} = require('internal/errors'); +const AssertionError = require('internal/assert/assertion_error'); +const { inspect } = require('internal/util/inspect'); +const { + isPromise, + isRegExp, +} = require('internal/util/types'); +const { isError, deprecate } = require('internal/util'); +const { innerOk } = require('internal/assert/utils'); + +const CallTracker = require('internal/assert/calltracker'); +const { + validateFunction, +} = require('internal/validators'); + +let isDeepEqual; +let isDeepStrictEqual; +let isPartialStrictEqual; + +function lazyLoadComparison() { + const comparison = require('internal/util/comparisons'); + isDeepEqual = comparison.isDeepEqual; + isDeepStrictEqual = comparison.isDeepStrictEqual; + isPartialStrictEqual = comparison.isPartialStrictEqual; +} + +let warned = false; + +// The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +const assert = module.exports = ok; + +const NO_EXCEPTION_SENTINEL = {}; + +// All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function innerFail(obj) { + if (obj.message instanceof Error) throw obj.message; + + throw new AssertionError(obj); +} + +/** + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @param {string} [operator] + * @param {Function} [stackStartFn] + */ +function fail(actual, expected, message, operator, stackStartFn) { + const argsLen = arguments.length; + + let internalMessage = false; + if (actual == null && argsLen <= 1) { + internalMessage = true; + message = 'Failed'; + } else if (argsLen === 1) { + message = actual; + actual = undefined; + } else { + if (warned === false) { + warned = true; + process.emitWarning( + 'assert.fail() with more than one argument is deprecated. 
' + + 'Please use assert.strictEqual() instead or only pass a message.', + 'DeprecationWarning', + 'DEP0094', + ); + } + if (argsLen === 2) + operator = '!='; + } + + if (message instanceof Error) throw message; + + const errArgs = { + actual, + expected, + operator: operator === undefined ? 'fail' : operator, + stackStartFn: stackStartFn || fail, + message, + }; + const err = new AssertionError(errArgs); + if (internalMessage) { + err.generatedMessage = true; + } + throw err; +} + +assert.fail = fail; + +// The AssertionError is defined in internal/error. +assert.AssertionError = AssertionError; + +/** + * Pure assertion tests whether a value is truthy, as determined + * by !!value. + * @param {...any} args + * @returns {void} + */ +function ok(...args) { + innerOk(ok, args.length, ...args); +} +assert.ok = ok; + +/** + * The equality assertion tests shallow, coercive equality with ==. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +/* eslint-disable no-restricted-properties */ +assert.equal = function equal(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + // eslint-disable-next-line eqeqeq + if (actual != expected && (!NumberIsNaN(actual) || !NumberIsNaN(expected))) { + innerFail({ + actual, + expected, + message, + operator: '==', + stackStartFn: equal, + }); + } +}; + +/** + * The non-equality assertion tests for whether two objects are not + * equal with !=. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notEqual = function notEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + // eslint-disable-next-line eqeqeq + if (actual == expected || (NumberIsNaN(actual) && NumberIsNaN(expected))) { + innerFail({ + actual, + expected, + message, + operator: '!=', + stackStartFn: notEqual, + }); + } +}; + +/** + * The deep equivalence assertion tests a deep equality relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.deepEqual = function deepEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isDeepEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'deepEqual', + stackStartFn: deepEqual, + }); + } +}; + +/** + * The deep non-equivalence assertion tests for any deep inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (isDeepEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notDeepEqual', + stackStartFn: notDeepEqual, + }); + } +}; +/* eslint-enable */ + +/** + * The deep strict equivalence assertion tests a deep strict equality + * relation. 
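+ * Primitive values are compared with Object.is() and object prototypes must match, unlike assert.deepEqual().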
+ * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isDeepStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'deepStrictEqual', + stackStartFn: deepStrictEqual, + }); + } +}; + +/** + * The deep strict non-equivalence assertion tests for any deep strict + * inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notDeepStrictEqual = notDeepStrictEqual; +function notDeepStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (isDeepStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notDeepStrictEqual', + stackStartFn: notDeepStrictEqual, + }); + } +} + +/** + * The strict equivalence assertion tests a strict equality relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.strictEqual = function strictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (!ObjectIs(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'strictEqual', + stackStartFn: strictEqual, + }); + } +}; + +/** + * The strict non-equivalence assertion tests for any strict inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (ObjectIs(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notStrictEqual', + stackStartFn: notStrictEqual, + }); + } +}; + +/** + * The strict equivalence assertion test between two objects + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.partialDeepStrictEqual = function partialDeepStrictEqual( + actual, + expected, + message, +) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isPartialStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'partialDeepStrictEqual', + stackStartFn: partialDeepStrictEqual, + }); + } +}; + +class Comparison { + constructor(obj, keys, actual) { + for (const key of keys) { + if (key in obj) { + if (actual !== undefined && + typeof actual[key] === 'string' && + isRegExp(obj[key]) && + RegExpPrototypeExec(obj[key], actual[key]) !== null) { + this[key] = actual[key]; + } else { + this[key] = obj[key]; + } + } + } + } +} + +function compareExceptionKey(actual, expected, key, message, keys, fn) { + if (!(key in actual) || !isDeepStrictEqual(actual[key], expected[key])) { + if (!message) { + // Create placeholder objects to create a nice output. 
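+ // Each Comparison copies only the keys under test, so the diff generated for the AssertionError message stays focused on those properties.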
+ const a = new Comparison(actual, keys); + const b = new Comparison(expected, keys, actual); + + const err = new AssertionError({ + actual: a, + expected: b, + operator: 'deepStrictEqual', + stackStartFn: fn, + }); + err.actual = actual; + err.expected = expected; + err.operator = fn.name; + throw err; + } + innerFail({ + actual, + expected, + message, + operator: fn.name, + stackStartFn: fn, + }); + } +} + +function expectedException(actual, expected, message, fn) { + let generatedMessage = false; + let throwError = false; + + if (typeof expected !== 'function') { + // Handle regular expressions. + if (isRegExp(expected)) { + const str = String(actual); + if (RegExpPrototypeExec(expected, str) !== null) + return; + + if (!message) { + generatedMessage = true; + message = 'The input did not match the regular expression ' + + `${inspect(expected)}. Input:\n\n${inspect(str)}\n`; + } + throwError = true; + // Handle primitives properly. + } else if (typeof actual !== 'object' || actual === null) { + const err = new AssertionError({ + actual, + expected, + message, + operator: 'deepStrictEqual', + stackStartFn: fn, + }); + err.operator = fn.name; + throw err; + } else { + // Handle validation objects. + const keys = ObjectKeys(expected); + // Special handle errors to make sure the name and the message are + // compared as well. + if (expected instanceof Error) { + ArrayPrototypePush(keys, 'name', 'message'); + } else if (keys.length === 0) { + throw new ERR_INVALID_ARG_VALUE('error', + expected, 'may not be an empty object'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + for (const key of keys) { + if (typeof actual[key] === 'string' && + isRegExp(expected[key]) && + RegExpPrototypeExec(expected[key], actual[key]) !== null) { + continue; + } + compareExceptionKey(actual, expected, key, message, keys, fn); + } + return; + } + // Guard instanceof against arrow functions as they don't have a prototype. + // Check for matching Error classes. + } else if (expected.prototype !== undefined && actual instanceof expected) { + return; + } else if (ObjectPrototypeIsPrototypeOf(Error, expected)) { + if (!message) { + generatedMessage = true; + message = 'The error is expected to be an instance of ' + + `"${expected.name}". Received `; + if (isError(actual)) { + const name = (actual.constructor?.name) || + actual.name; + if (expected.name === name) { + message += 'an error with identical name but a different prototype.'; + } else { + message += `"${name}"`; + } + if (actual.message) { + message += `\n\nError message:\n\n${actual.message}`; + } + } else { + message += `"${inspect(actual, { depth: -1 })}"`; + } + } + throwError = true; + } else { + // Check validation functions return value. + const res = ReflectApply(expected, {}, [actual]); + if (res !== true) { + if (!message) { + generatedMessage = true; + const name = expected.name ? `"${expected.name}" ` : ''; + message = `The ${name}validation function is expected to return` + + ` "true". 
Received ${inspect(res)}`; + + if (isError(actual)) { + message += `\n\nCaught error:\n\n${actual}`; + } + } + throwError = true; + } + } + + if (throwError) { + const err = new AssertionError({ + actual, + expected, + message, + operator: fn.name, + stackStartFn: fn, + }); + err.generatedMessage = generatedMessage; + throw err; + } +} + +function getActual(fn) { + validateFunction(fn, 'fn'); + try { + fn(); + } catch (e) { + return e; + } + return NO_EXCEPTION_SENTINEL; +} + +function checkIsPromise(obj) { + // Accept native ES6 promises and promises that are implemented in a similar + // way. Do not accept thenables that use a function as `obj` and that have no + // `catch` handler. + return isPromise(obj) || + (obj !== null && typeof obj === 'object' && + typeof obj.then === 'function' && + typeof obj.catch === 'function'); +} + +async function waitForActual(promiseFn) { + let resultPromise; + if (typeof promiseFn === 'function') { + // Return a rejected promise if `promiseFn` throws synchronously. + resultPromise = promiseFn(); + // Fail in case no promise is returned. + if (!checkIsPromise(resultPromise)) { + throw new ERR_INVALID_RETURN_VALUE('instance of Promise', + 'promiseFn', resultPromise); + } + } else if (checkIsPromise(promiseFn)) { + resultPromise = promiseFn; + } else { + throw new ERR_INVALID_ARG_TYPE( + 'promiseFn', ['Function', 'Promise'], promiseFn); + } + + try { + await resultPromise; + } catch (e) { + return e; + } + return NO_EXCEPTION_SENTINEL; +} + +function expectsError(stackStartFn, actual, error, message) { + if (typeof error === 'string') { + if (arguments.length === 4) { + throw new ERR_INVALID_ARG_TYPE('error', + ['Object', 'Error', 'Function', 'RegExp'], + error); + } + if (typeof actual === 'object' && actual !== null) { + if (actual.message === error) { + throw new ERR_AMBIGUOUS_ARGUMENT( + 'error/message', + `The error message "${actual.message}" is identical to the message.`, + ); + } + } else if (actual === error) { + throw new ERR_AMBIGUOUS_ARGUMENT( + 'error/message', + `The error "${actual}" is identical to the message.`, + ); + } + message = error; + error = undefined; + } else if (error != null && + typeof error !== 'object' && + typeof error !== 'function') { + throw new ERR_INVALID_ARG_TYPE('error', + ['Object', 'Error', 'Function', 'RegExp'], + error); + } + + if (actual === NO_EXCEPTION_SENTINEL) { + let details = ''; + if (error?.name) { + details += ` (${error.name})`; + } + details += message ? `: ${message}` : '.'; + const fnType = stackStartFn === assert.rejects ? 'rejection' : 'exception'; + innerFail({ + actual: undefined, + expected: error, + operator: stackStartFn.name, + message: `Missing expected ${fnType}${details}`, + stackStartFn, + }); + } + + if (!error) + return; + + expectedException(actual, error, message, stackStartFn); +} + +function hasMatchingError(actual, expected) { + if (typeof expected !== 'function') { + if (isRegExp(expected)) { + const str = String(actual); + return RegExpPrototypeExec(expected, str) !== null; + } + throw new ERR_INVALID_ARG_TYPE( + 'expected', ['Function', 'RegExp'], expected, + ); + } + // Guard instanceof against arrow functions as they don't have a prototype. 
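+ // An arrow function therefore skips this branch and ends up being invoked as a plain validation function at the bottom of hasMatchingError().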
+ if (expected.prototype !== undefined && actual instanceof expected) { + return true; + } + if (ObjectPrototypeIsPrototypeOf(Error, expected)) { + return false; + } + return ReflectApply(expected, {}, [actual]) === true; +} + +function expectsNoError(stackStartFn, actual, error, message) { + if (actual === NO_EXCEPTION_SENTINEL) + return; + + if (typeof error === 'string') { + message = error; + error = undefined; + } + + if (!error || hasMatchingError(actual, error)) { + const details = message ? `: ${message}` : '.'; + const fnType = stackStartFn === assert.doesNotReject ? + 'rejection' : 'exception'; + innerFail({ + actual, + expected: error, + operator: stackStartFn.name, + message: `Got unwanted ${fnType}${details}\n` + + `Actual message: "${actual?.message}"`, + stackStartFn, + }); + } + throw actual; +} + +/** + * Expects the function `promiseFn` to throw an error. + * @param {() => any} promiseFn + * @param {...any} [args] + * @returns {void} + */ +assert.throws = function throws(promiseFn, ...args) { + expectsError(throws, getActual(promiseFn), ...args); +}; + +/** + * Expects `promiseFn` function or its value to reject. + * @param {() => Promise} promiseFn + * @param {...any} [args] + * @returns {Promise} + */ +assert.rejects = async function rejects(promiseFn, ...args) { + expectsError(rejects, await waitForActual(promiseFn), ...args); +}; + +/** + * Asserts that the function `fn` does not throw an error. + * @param {() => any} fn + * @param {...any} [args] + * @returns {void} + */ +assert.doesNotThrow = function doesNotThrow(fn, ...args) { + expectsNoError(doesNotThrow, getActual(fn), ...args); +}; + +/** + * Expects `fn` or its value to not reject. + * @param {() => Promise} fn + * @param {...any} [args] + * @returns {Promise} + */ +assert.doesNotReject = async function doesNotReject(fn, ...args) { + expectsNoError(doesNotReject, await waitForActual(fn), ...args); +}; + +/** + * Throws `AssertionError` if the value is not `null` or `undefined`. + * @param {any} err + * @returns {void} + */ +assert.ifError = function ifError(err) { + if (err !== null && err !== undefined) { + let message = 'ifError got unwanted exception: '; + if (typeof err === 'object' && typeof err.message === 'string') { + if (err.message.length === 0 && err.constructor) { + message += err.constructor.name; + } else { + message += err.message; + } + } else { + message += inspect(err); + } + + const newErr = new AssertionError({ + actual: err, + expected: null, + operator: 'ifError', + message, + stackStartFn: ifError, + }); + + // Make sure we actually have a stack trace! + const origStack = err.stack; + + if (typeof origStack === 'string') { + // This will remove any duplicated frames from the error frames taken + // from within `ifError` and add the original error frames to the newly + // created ones. + const origStackStart = StringPrototypeIndexOf(origStack, '\n at'); + if (origStackStart !== -1) { + const originalFrames = StringPrototypeSplit( + StringPrototypeSlice(origStack, origStackStart + 1), + '\n', + ); + // Filter all frames existing in err.stack. + let newFrames = StringPrototypeSplit(newErr.stack, '\n'); + for (const errFrame of originalFrames) { + // Find the first occurrence of the frame. + const pos = ArrayPrototypeIndexOf(newFrames, errFrame); + if (pos !== -1) { + // Only keep new frames. 
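+ // Frames from `pos` onward duplicate the original error's frames, which are re-attached below via `stackEnd`.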
+ newFrames = ArrayPrototypeSlice(newFrames, 0, pos); + break; + } + } + const stackStart = ArrayPrototypeJoin(newFrames, '\n'); + const stackEnd = ArrayPrototypeJoin(originalFrames, '\n'); + newErr.stack = `${stackStart}\n${stackEnd}`; + } + } + + throw newErr; + } +}; + +function internalMatch(string, regexp, message, fn) { + if (!isRegExp(regexp)) { + throw new ERR_INVALID_ARG_TYPE( + 'regexp', 'RegExp', regexp, + ); + } + const match = fn === assert.match; + if (typeof string !== 'string' || + RegExpPrototypeExec(regexp, string) !== null !== match) { + if (message instanceof Error) { + throw message; + } + + const generatedMessage = !message; + + // 'The input was expected to not match the regular expression ' + + message ||= (typeof string !== 'string' ? + 'The "string" argument must be of type string. Received type ' + + `${typeof string} (${inspect(string)})` : + (match ? + 'The input did not match the regular expression ' : + 'The input was expected to not match the regular expression ') + + `${inspect(regexp)}. Input:\n\n${inspect(string)}\n`); + const err = new AssertionError({ + actual: string, + expected: regexp, + message, + operator: fn.name, + stackStartFn: fn, + }); + err.generatedMessage = generatedMessage; + throw err; + } +} + +/** + * Expects the `string` input to match the regular expression. + * @param {string} string + * @param {RegExp} regexp + * @param {string | Error} [message] + * @returns {void} + */ +assert.match = function match(string, regexp, message) { + internalMatch(string, regexp, message, match); +}; + +/** + * Expects the `string` input not to match the regular expression. + * @param {string} string + * @param {RegExp} regexp + * @param {string | Error} [message] + * @returns {void} + */ +assert.doesNotMatch = function doesNotMatch(string, regexp, message) { + internalMatch(string, regexp, message, doesNotMatch); +}; + +assert.CallTracker = deprecate(CallTracker, 'assert.CallTracker is deprecated.', 'DEP0173'); + +/** + * Expose a strict only variant of assert. + * @param {...any} args + * @returns {void} + */ +function strict(...args) { + innerOk(strict, args.length, ...args); +} + +assert.strict = ObjectAssign(strict, assert, { + equal: assert.strictEqual, + deepEqual: assert.deepStrictEqual, + notEqual: assert.notStrictEqual, + notDeepEqual: assert.notDeepStrictEqual, +}); + +assert.strict.strict = assert.strict; \ No newline at end of file diff --git a/.codesandbox/node/async_hooks.js b/.codesandbox/node/async_hooks.js new file mode 100644 index 0000000000..8c57bc67fe --- /dev/null +++ b/.codesandbox/node/async_hooks.js @@ -0,0 +1,296 @@ +'use strict'; + +const { + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + FunctionPrototypeBind, + NumberIsSafeInteger, + ObjectDefineProperties, + ObjectFreeze, + ReflectApply, + Symbol, +} = primordials; + +const { + ERR_ASYNC_CALLBACK, + ERR_ASYNC_TYPE, + ERR_INVALID_ASYNC_ID, +} = require('internal/errors').codes; +const { + deprecate, + kEmptyObject, +} = require('internal/util'); +const { + validateFunction, + validateString, +} = require('internal/validators'); +const internal_async_hooks = require('internal/async_hooks'); + +const AsyncContextFrame = require('internal/async_context_frame'); + +// Get functions +// For userland AsyncResources, make sure to emit a destroy event when the +// resource gets gced. 
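+// registerDestroyHook (destructured just below) is what emits that destroy event once the resource has been garbage collected.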
+const { registerDestroyHook, kNoPromiseHook } = internal_async_hooks; +const { + asyncWrap, + executionAsyncId, + triggerAsyncId, + // Private API + hasAsyncIdStack, + getHookArrays, + enableHooks, + disableHooks, + updatePromiseHookMode, + executionAsyncResource, + // Internal Embedder API + newAsyncId, + getDefaultTriggerAsyncId, + emitInit, + emitBefore, + emitAfter, + emitDestroy, + enabledHooksExist, + initHooksExist, + destroyHooksExist, +} = internal_async_hooks; + +// Get symbols +const { + async_id_symbol, trigger_async_id_symbol, + init_symbol, before_symbol, after_symbol, destroy_symbol, + promise_resolve_symbol, +} = internal_async_hooks.symbols; + +// Get constants +const { + kInit, kBefore, kAfter, kDestroy, kTotals, kPromiseResolve, +} = internal_async_hooks.constants; + +// Listener API // + +class AsyncHook { + constructor({ init, before, after, destroy, promiseResolve }) { + if (init !== undefined && typeof init !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.init'); + if (before !== undefined && typeof before !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.before'); + if (after !== undefined && typeof after !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.after'); + if (destroy !== undefined && typeof destroy !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.destroy'); + if (promiseResolve !== undefined && typeof promiseResolve !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.promiseResolve'); + + this[init_symbol] = init; + this[before_symbol] = before; + this[after_symbol] = after; + this[destroy_symbol] = destroy; + this[promise_resolve_symbol] = promiseResolve; + this[kNoPromiseHook] = false; + } + + enable() { + // The set of callbacks for a hook should be the same regardless of whether + // enable()/disable() are run during their execution. The following + // references are reassigned to the tmp arrays if a hook is currently being + // processed. + const { 0: hooks_array, 1: hook_fields } = getHookArrays(); + + // Each hook is only allowed to be added once. + if (ArrayPrototypeIncludes(hooks_array, this)) + return this; + + const prev_kTotals = hook_fields[kTotals]; + + // createHook() has already enforced that the callbacks are all functions, + // so here simply increment the count of whether each callbacks exists or + // not. 
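+ // (+!!cb is 1 if the callback was supplied and 0 otherwise, so each per-event field counts the enabled hooks providing it and kTotals is rebuilt as the sum of those counters.)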
+ hook_fields[kTotals] = hook_fields[kInit] += +!!this[init_symbol]; + hook_fields[kTotals] += hook_fields[kBefore] += +!!this[before_symbol]; + hook_fields[kTotals] += hook_fields[kAfter] += +!!this[after_symbol]; + hook_fields[kTotals] += hook_fields[kDestroy] += +!!this[destroy_symbol]; + hook_fields[kTotals] += + hook_fields[kPromiseResolve] += +!!this[promise_resolve_symbol]; + ArrayPrototypePush(hooks_array, this); + + if (prev_kTotals === 0 && hook_fields[kTotals] > 0) { + enableHooks(); + } + + if (!this[kNoPromiseHook]) { + updatePromiseHookMode(); + } + + return this; + } + + disable() { + const { 0: hooks_array, 1: hook_fields } = getHookArrays(); + + const index = ArrayPrototypeIndexOf(hooks_array, this); + if (index === -1) + return this; + + const prev_kTotals = hook_fields[kTotals]; + + hook_fields[kTotals] = hook_fields[kInit] -= +!!this[init_symbol]; + hook_fields[kTotals] += hook_fields[kBefore] -= +!!this[before_symbol]; + hook_fields[kTotals] += hook_fields[kAfter] -= +!!this[after_symbol]; + hook_fields[kTotals] += hook_fields[kDestroy] -= +!!this[destroy_symbol]; + hook_fields[kTotals] += + hook_fields[kPromiseResolve] -= +!!this[promise_resolve_symbol]; + ArrayPrototypeSplice(hooks_array, index, 1); + + if (prev_kTotals > 0 && hook_fields[kTotals] === 0) { + disableHooks(); + } + + return this; + } +} + + +function createHook(fns) { + return new AsyncHook(fns); +} + + +// Embedder API // + +const destroyedSymbol = Symbol('destroyed'); +const contextFrameSymbol = Symbol('context_frame'); + +class AsyncResource { + constructor(type, opts = kEmptyObject) { + validateString(type, 'type'); + + let triggerAsyncId = opts; + let requireManualDestroy = false; + if (typeof opts !== 'number') { + triggerAsyncId = opts.triggerAsyncId === undefined ? + getDefaultTriggerAsyncId() : opts.triggerAsyncId; + requireManualDestroy = !!opts.requireManualDestroy; + } + + // Unlike emitInitScript, AsyncResource doesn't supports null as the + // triggerAsyncId. 
+ if (!NumberIsSafeInteger(triggerAsyncId) || triggerAsyncId < -1) { + throw new ERR_INVALID_ASYNC_ID('triggerAsyncId', triggerAsyncId); + } + + this[contextFrameSymbol] = AsyncContextFrame.current(); + + const asyncId = newAsyncId(); + this[async_id_symbol] = asyncId; + this[trigger_async_id_symbol] = triggerAsyncId; + + if (initHooksExist()) { + if (enabledHooksExist() && type.length === 0) { + throw new ERR_ASYNC_TYPE(type); + } + + emitInit(asyncId, type, triggerAsyncId, this); + } + + if (!requireManualDestroy && destroyHooksExist()) { + // This prop name (destroyed) has to be synchronized with C++ + const destroyed = { destroyed: false }; + this[destroyedSymbol] = destroyed; + registerDestroyHook(this, asyncId, destroyed); + } + } + + runInAsyncScope(fn, thisArg, ...args) { + const asyncId = this[async_id_symbol]; + emitBefore(asyncId, this[trigger_async_id_symbol], this); + + const contextFrame = this[contextFrameSymbol]; + const prior = AsyncContextFrame.exchange(contextFrame); + try { + return ReflectApply(fn, thisArg, args); + } finally { + AsyncContextFrame.set(prior); + if (hasAsyncIdStack()) + emitAfter(asyncId); + } + } + + emitDestroy() { + if (this[destroyedSymbol] !== undefined) { + this[destroyedSymbol].destroyed = true; + } + emitDestroy(this[async_id_symbol]); + return this; + } + + asyncId() { + return this[async_id_symbol]; + } + + triggerAsyncId() { + return this[trigger_async_id_symbol]; + } + + bind(fn, thisArg) { + validateFunction(fn, 'fn'); + let bound; + if (thisArg === undefined) { + const resource = this; + bound = function(...args) { + ArrayPrototypeUnshift(args, fn, this); + return ReflectApply(resource.runInAsyncScope, resource, args); + }; + } else { + bound = FunctionPrototypeBind(this.runInAsyncScope, this, fn, thisArg); + } + let self = this; + ObjectDefineProperties(bound, { + 'length': { + __proto__: null, + configurable: true, + enumerable: false, + value: fn.length, + writable: false, + }, + 'asyncResource': { + __proto__: null, + configurable: true, + enumerable: true, + get: deprecate(function() { + return self; + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + set: deprecate(function(val) { + self = val; + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + }, + }); + return bound; + } + + static bind(fn, type, thisArg) { + type ||= fn.name; + return (new AsyncResource(type || 'bound-anonymous-fn')).bind(fn, thisArg); + } +} + +// Placing all exports down here because the exported classes won't export +// otherwise. +module.exports = { + // Public API + get AsyncLocalStorage() { + return AsyncContextFrame.enabled ? 
+ require('internal/async_local_storage/async_context_frame') : + require('internal/async_local_storage/async_hooks'); + }, + createHook, + executionAsyncId, + triggerAsyncId, + executionAsyncResource, + asyncWrapProviders: ObjectFreeze({ __proto__: null, ...asyncWrap.Providers }), + // Embedder API + AsyncResource, +}; \ No newline at end of file diff --git a/.codesandbox/node/buffer.js b/.codesandbox/node/buffer.js new file mode 100644 index 0000000000..06bfebc7e1 --- /dev/null +++ b/.codesandbox/node/buffer.js @@ -0,0 +1,1365 @@ +'use strict'; + +const { + Array, + ArrayBufferIsView, + ArrayIsArray, + ArrayPrototypeForEach, + MathFloor, + MathMin, + MathTrunc, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + ObjectSetPrototypeOf, + RegExpPrototypeSymbolReplace, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + StringPrototypeToLowerCase, + StringPrototypeTrim, + SymbolSpecies, + SymbolToPrimitive, + TypedArrayPrototypeFill, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeSet, + TypedArrayPrototypeSlice, + Uint8Array, + Uint8ArrayPrototype, +} = primordials; + +const { + byteLengthUtf8, + compare: _compare, + compareOffset, + copy: _copy, + fill: bindingFill, + isAscii: bindingIsAscii, + isUtf8: bindingIsUtf8, + indexOfBuffer, + indexOfNumber, + indexOfString, + swap16: _swap16, + swap32: _swap32, + swap64: _swap64, + kMaxLength, + kStringMaxLength, + atob: _atob, + btoa: _btoa, +} = internalBinding('buffer'); +const { + constants: { + ALL_PROPERTIES, + ONLY_ENUMERABLE, + }, + getOwnNonIndexProperties, + isInsideNodeModules, +} = internalBinding('util'); +const { + customInspectSymbol, + lazyDOMException, + normalizeEncoding, + kIsEncodingSymbol, + defineLazyProperties, + encodingsMap, + deprecate, +} = require('internal/util'); +const { + isAnyArrayBuffer, + isArrayBufferView, + isUint8Array, + isTypedArray, +} = require('internal/util/types'); +const { + inspect: utilInspect, +} = require('internal/util/inspect'); + +const { + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_BUFFER_SIZE, + ERR_MISSING_ARGS, + ERR_OUT_OF_RANGE, + ERR_UNKNOWN_ENCODING, + }, + genericNodeError, +} = require('internal/errors'); +const { + validateArray, + validateBuffer, + validateInteger, + validateNumber, + validateString, +} = require('internal/validators'); +// Provide validateInteger() but with kMaxLength as the default maximum value. 
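+// Used throughout this file wherever an offset or length argument must be a non-negative integer (bounded by kMaxLength by default).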
+const validateOffset = (value, name, min = 0, max = kMaxLength) => + validateInteger(value, name, min, max); + +const { + FastBuffer, + markAsUntransferable, + addBufferPrototypeMethods, + createUnsafeBuffer, +} = require('internal/buffer'); + +FastBuffer.prototype.constructor = Buffer; +Buffer.prototype = FastBuffer.prototype; +addBufferPrototypeMethods(Buffer.prototype); + +const constants = ObjectDefineProperties({}, { + MAX_LENGTH: { + __proto__: null, + value: kMaxLength, + writable: false, + enumerable: true, + }, + MAX_STRING_LENGTH: { + __proto__: null, + value: kStringMaxLength, + writable: false, + enumerable: true, + }, +}); + +Buffer.poolSize = 8 * 1024; +let poolSize, poolOffset, allocPool, allocBuffer; + +function createPool() { + poolSize = Buffer.poolSize; + allocBuffer = createUnsafeBuffer(poolSize); + allocPool = allocBuffer.buffer; + markAsUntransferable(allocPool); + poolOffset = 0; +} +createPool(); + +function alignPool() { + // Ensure aligned slices + if (poolOffset & 0x7) { + poolOffset |= 0x7; + poolOffset++; + } +} + +let bufferWarningAlreadyEmitted = false; +let nodeModulesCheckCounter = 0; +const bufferWarning = 'Buffer() is deprecated due to security and usability ' + + 'issues. Please use the Buffer.alloc(), ' + + 'Buffer.allocUnsafe(), or Buffer.from() methods instead.'; + +function showFlaggedDeprecation() { + if (bufferWarningAlreadyEmitted || + ++nodeModulesCheckCounter > 10000 || + (!require('internal/options').getOptionValue('--pending-deprecation') && + isInsideNodeModules(100, true))) { + // We don't emit a warning, because we either: + // - Already did so, or + // - Already checked too many times whether a call is coming + // from node_modules and want to stop slowing down things, or + // - We aren't running with `--pending-deprecation` enabled, + // and the code is inside `node_modules`. + // - We found node_modules in up to the topmost 100 frames, or + // there are more than 100 frames and we don't want to search anymore. + return; + } + + process.emitWarning(bufferWarning, 'DeprecationWarning', 'DEP0005'); + bufferWarningAlreadyEmitted = true; +} + +function toInteger(n, defaultVal) { + n = +n; + if (!NumberIsNaN(n) && + n >= NumberMIN_SAFE_INTEGER && + n <= NumberMAX_SAFE_INTEGER) { + return ((n % 1) === 0 ? n : MathFloor(n)); + } + return defaultVal; +} + +function copyImpl(source, target, targetStart, sourceStart, sourceEnd) { + if (!ArrayBufferIsView(source)) + throw new ERR_INVALID_ARG_TYPE('source', ['Buffer', 'Uint8Array'], source); + if (!ArrayBufferIsView(target)) + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + + if (targetStart === undefined) { + targetStart = 0; + } else { + targetStart = NumberIsInteger(targetStart) ? targetStart : toInteger(targetStart, 0); + if (targetStart < 0) + throw new ERR_OUT_OF_RANGE('targetStart', '>= 0', targetStart); + } + + if (sourceStart === undefined) { + sourceStart = 0; + } else { + sourceStart = NumberIsInteger(sourceStart) ? sourceStart : toInteger(sourceStart, 0); + if (sourceStart < 0 || sourceStart > source.byteLength) + throw new ERR_OUT_OF_RANGE('sourceStart', `>= 0 && <= ${source.byteLength}`, sourceStart); + } + + if (sourceEnd === undefined) { + sourceEnd = source.byteLength; + } else { + sourceEnd = NumberIsInteger(sourceEnd) ? 
sourceEnd : toInteger(sourceEnd, 0); + if (sourceEnd < 0) + throw new ERR_OUT_OF_RANGE('sourceEnd', '>= 0', sourceEnd); + } + + if (targetStart >= target.byteLength || sourceStart >= sourceEnd) + return 0; + + return _copyActual(source, target, targetStart, sourceStart, sourceEnd); +} + +function _copyActual(source, target, targetStart, sourceStart, sourceEnd) { + if (sourceEnd - sourceStart > target.byteLength - targetStart) + sourceEnd = sourceStart + target.byteLength - targetStart; + + let nb = sourceEnd - sourceStart; + const sourceLen = source.byteLength - sourceStart; + if (nb > sourceLen) + nb = sourceLen; + + if (nb <= 0) + return 0; + + _copy(source, target, targetStart, sourceStart, nb); + + return nb; +} + +/** + * The Buffer() constructor is deprecated in documentation and should not be + * used moving forward. Rather, developers should use one of the three new + * factory APIs: Buffer.from(), Buffer.allocUnsafe() or Buffer.alloc() based on + * their specific needs. There is no runtime deprecation because of the extent + * to which the Buffer constructor is used in the ecosystem currently -- a + * runtime deprecation would introduce too much breakage at this time. It's not + * likely that the Buffer constructors would ever actually be removed. + * Deprecation Code: DEP0005 + * @returns {Buffer} + */ +function Buffer(arg, encodingOrOffset, length) { + showFlaggedDeprecation(); + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new ERR_INVALID_ARG_TYPE('string', 'string', arg); + } + return Buffer.alloc(arg); + } + return Buffer.from(arg, encodingOrOffset, length); +} + +ObjectDefineProperty(Buffer, SymbolSpecies, { + __proto__: null, + enumerable: false, + configurable: true, + get() { return FastBuffer; }, +}); + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + * @param {any} value + * @param {BufferEncoding|number} encodingOrOffset + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.from = function from(value, encodingOrOffset, length) { + if (typeof value === 'string') + return fromString(value, encodingOrOffset); + + if (typeof value === 'object' && value !== null) { + if (isAnyArrayBuffer(value)) + return fromArrayBuffer(value, encodingOrOffset, length); + + const valueOf = value.valueOf && value.valueOf(); + if (valueOf != null && + valueOf !== value && + (typeof valueOf === 'string' || typeof valueOf === 'object')) { + return from(valueOf, encodingOrOffset, length); + } + + const b = fromObject(value); + if (b) + return b; + + if (typeof value[SymbolToPrimitive] === 'function') { + const primitive = value[SymbolToPrimitive]('string'); + if (typeof primitive === 'string') { + return fromString(primitive, encodingOrOffset); + } + } + } + + throw new ERR_INVALID_ARG_TYPE( + 'first argument', + ['string', 'Buffer', 'ArrayBuffer', 'Array', 'Array-like Object'], + value, + ); +}; + +/** + * Creates the Buffer as a copy of the underlying ArrayBuffer of the view + * rather than the contents of the view. 
+ * @param {TypedArray} view + * @param {number} [offset] + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.copyBytesFrom = function copyBytesFrom(view, offset, length) { + if (!isTypedArray(view)) { + throw new ERR_INVALID_ARG_TYPE('view', [ 'TypedArray' ], view); + } + + const viewLength = TypedArrayPrototypeGetLength(view); + if (viewLength === 0) { + return Buffer.alloc(0); + } + + if (offset !== undefined || length !== undefined) { + if (offset !== undefined) { + validateInteger(offset, 'offset', 0); + if (offset >= viewLength) return Buffer.alloc(0); + } else { + offset = 0; + } + let end; + if (length !== undefined) { + validateInteger(length, 'length', 0); + end = offset + length; + } else { + end = viewLength; + } + + view = TypedArrayPrototypeSlice(view, offset, end); + } + + return fromArrayLike(new Uint8Array( + TypedArrayPrototypeGetBuffer(view), + TypedArrayPrototypeGetByteOffset(view), + TypedArrayPrototypeGetByteLength(view))); +}; + +// Identical to the built-in %TypedArray%.of(), but avoids using the deprecated +// Buffer() constructor. Must use arrow function syntax to avoid automatically +// adding a `prototype` property and making the function a constructor. +// +// Refs: https://tc39.github.io/ecma262/#sec-%typedarray%.of +// Refs: https://esdiscuss.org/topic/isconstructor#content-11 +const of = (...items) => { + const newObj = createUnsafeBuffer(items.length); + for (let k = 0; k < items.length; k++) + newObj[k] = items[k]; + return newObj; +}; +Buffer.of = of; + +ObjectSetPrototypeOf(Buffer, Uint8Array); + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + * @returns {FastBuffer} + */ +Buffer.alloc = function alloc(size, fill, encoding) { + validateNumber(size, 'size', 0, kMaxLength); + if (fill !== undefined && fill !== 0 && size > 0) { + const buf = createUnsafeBuffer(size); + return _fill(buf, fill, 0, buf.length, encoding); + } + return new FastBuffer(size); +}; + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer + * instance. If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @returns {FastBuffer} + */ +Buffer.allocUnsafe = function allocUnsafe(size) { + validateNumber(size, 'size', 0, kMaxLength); + return allocate(size); +}; + +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled + * Buffer instance that is not allocated off the pre-initialized pool. + * If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @param {number} size + * @returns {FastBuffer|undefined} + */ +Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +}; + +// If --zero-fill-buffers command line argument is set, a zero-filled +// buffer is returned. 
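+// SlowBuffer behaves like Buffer.allocUnsafeSlow(): the memory is freshly allocated rather than sliced from the shared pre-allocation pool.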
+function SlowBuffer(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +} + +ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype); +ObjectSetPrototypeOf(SlowBuffer, Uint8Array); + +function allocate(size) { + if (size <= 0) { + return new FastBuffer(); + } + if (size < (Buffer.poolSize >>> 1)) { + if (size > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, size); + poolOffset += size; + alignPool(); + return b; + } + return createUnsafeBuffer(size); +} + +function fromStringFast(string, ops) { + const maxLength = Buffer.poolSize >>> 1; + + let length = string.length; // Min length + + if (length >= maxLength) + return createFromString(string, ops); + + length *= 4; // Max length (4 bytes per character) + + if (length >= maxLength) + length = ops.byteLength(string); // Actual length + + if (length >= maxLength) + return createFromString(string, ops, length); + + if (length > (poolSize - poolOffset)) + createPool(); + + const actual = ops.write(allocBuffer, string, poolOffset, length); + const b = new FastBuffer(allocPool, poolOffset, actual); + + poolOffset += actual; + alignPool(); + return b; +} + +function createFromString(string, ops, length = ops.byteLength(string)) { + const buf = Buffer.allocUnsafeSlow(length); + const actual = ops.write(buf, string, 0, length); + return actual < length ? new FastBuffer(buf.buffer, 0, actual) : buf; +} + +function fromString(string, encoding) { + let ops; + if (!encoding || encoding === 'utf8' || typeof encoding !== 'string') { + ops = encodingOps.utf8; + } else { + ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + return string.length === 0 ? new FastBuffer() : fromStringFast(string, ops); +} + +function fromArrayBuffer(obj, byteOffset, length) { + // Convert byteOffset to integer + if (byteOffset === undefined) { + byteOffset = 0; + } else { + byteOffset = +byteOffset; + if (NumberIsNaN(byteOffset)) + byteOffset = 0; + } + + const maxLength = obj.byteLength - byteOffset; + + if (maxLength < 0) + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + + if (length !== undefined) { + // Convert length to non-negative integer. 
+ length = +length; + if (length > 0) { + if (length > maxLength) + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } else { + length = 0; + } + } + + return new FastBuffer(obj, byteOffset, length); +} + +function fromArrayLike(obj) { + if (obj.length <= 0) + return new FastBuffer(); + if (obj.length < (Buffer.poolSize >>> 1)) { + if (obj.length > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, obj.length); + TypedArrayPrototypeSet(b, obj, 0); + poolOffset += obj.length; + alignPool(); + return b; + } + return new FastBuffer(obj); +} + +function fromObject(obj) { + if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) { + if (typeof obj.length !== 'number') { + return new FastBuffer(); + } + return fromArrayLike(obj); + } + + if (obj.type === 'Buffer' && ArrayIsArray(obj.data)) { + return fromArrayLike(obj.data); + } +} + +// Static methods + +Buffer.isBuffer = function isBuffer(b) { + return b instanceof Buffer; +}; + +Buffer.compare = function compare(buf1, buf2) { + if (!isUint8Array(buf1)) { + throw new ERR_INVALID_ARG_TYPE('buf1', ['Buffer', 'Uint8Array'], buf1); + } + + if (!isUint8Array(buf2)) { + throw new ERR_INVALID_ARG_TYPE('buf2', ['Buffer', 'Uint8Array'], buf2); + } + + if (buf1 === buf2) { + return 0; + } + + return _compare(buf1, buf2); +}; + +Buffer.isEncoding = function isEncoding(encoding) { + return typeof encoding === 'string' && encoding.length !== 0 && + normalizeEncoding(encoding) !== undefined; +}; +Buffer[kIsEncodingSymbol] = Buffer.isEncoding; + +Buffer.concat = function concat(list, length) { + validateArray(list, 'list'); + + if (list.length === 0) + return new FastBuffer(); + + if (length === undefined) { + length = 0; + for (let i = 0; i < list.length; i++) { + if (list[i].length) { + length += list[i].length; + } + } + } else { + validateOffset(length, 'length'); + } + + const buffer = Buffer.allocUnsafe(length); + let pos = 0; + for (let i = 0; i < list.length; i++) { + const buf = list[i]; + if (!isUint8Array(buf)) { + // TODO(BridgeAR): This should not be of type ERR_INVALID_ARG_TYPE. + // Instead, find the proper error code for this. + throw new ERR_INVALID_ARG_TYPE( + `list[${i}]`, ['Buffer', 'Uint8Array'], list[i]); + } + pos += _copyActual(buf, buffer, pos, 0, buf.length); + } + + // Note: `length` is always equal to `buffer.length` at this point + if (pos < length) { + // Zero-fill the remaining bytes if the specified `length` was more than + // the actual total length, i.e. if we have some remaining allocated bytes + // there were not initialized. 
+ TypedArrayPrototypeFill(buffer, 0, pos, length); + } + + return buffer; +}; + +function base64ByteLength(str, bytes) { + // Handle padding + if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + + // Base64 ratio: 3/4 + return (bytes * 3) >>> 2; +} + +const encodingOps = { + utf8: { + encoding: 'utf8', + encodingVal: encodingsMap.utf8, + byteLength: byteLengthUtf8, + write: (buf, string, offset, len) => buf.utf8Write(string, offset, len), + slice: (buf, start, end) => buf.utf8Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf8, dir), + }, + ucs2: { + encoding: 'ucs2', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + utf16le: { + encoding: 'utf16le', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + latin1: { + encoding: 'latin1', + encodingVal: encodingsMap.latin1, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.latin1Write(string, offset, len), + slice: (buf, start, end) => buf.latin1Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.latin1, dir), + }, + ascii: { + encoding: 'ascii', + encodingVal: encodingsMap.ascii, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len), + slice: (buf, start, end) => buf.asciiSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.ascii), + byteOffset, + encodingsMap.ascii, + dir), + }, + base64: { + encoding: 'base64', + encodingVal: encodingsMap.base64, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => buf.base64Write(string, offset, len), + slice: (buf, start, end) => buf.base64Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64), + byteOffset, + encodingsMap.base64, + dir), + }, + base64url: { + encoding: 'base64url', + encodingVal: encodingsMap.base64url, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => + buf.base64urlWrite(string, offset, len), + slice: (buf, start, end) => buf.base64urlSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64url), + byteOffset, + encodingsMap.base64url, + dir), + }, + hex: { + encoding: 'hex', + encodingVal: encodingsMap.hex, + byteLength: (string) => string.length >>> 1, + write: (buf, string, offset, len) => buf.hexWrite(string, offset, len), + slice: (buf, start, end) => buf.hexSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.hex), + byteOffset, + encodingsMap.hex, + dir), + }, +}; +function getEncodingOps(encoding) { + encoding += ''; + switch (encoding.length) { + case 4: + 
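+ // Each case tries the common exact spellings first and only falls back to a lowercased comparison when they miss.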
if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + break; + case 5: + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + break; + case 7: + if (encoding === 'utf16le' || + StringPrototypeToLowerCase(encoding) === 'utf16le') + return encodingOps.utf16le; + break; + case 8: + if (encoding === 'utf-16le' || + StringPrototypeToLowerCase(encoding) === 'utf-16le') + return encodingOps.utf16le; + break; + case 6: + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + break; + case 3: + if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex') + return encodingOps.hex; + break; + case 9: + if (encoding === 'base64url' || + StringPrototypeToLowerCase(encoding) === 'base64url') + return encodingOps.base64url; + break; + } +} + +function byteLength(string, encoding) { + if (typeof string !== 'string') { + if (isArrayBufferView(string) || isAnyArrayBuffer(string)) { + return string.byteLength; + } + + throw new ERR_INVALID_ARG_TYPE( + 'string', ['string', 'Buffer', 'ArrayBuffer'], string, + ); + } + + const len = string.length; + if (len === 0) + return 0; + + if (!encoding || encoding === 'utf8') { + return byteLengthUtf8(string); + } + + if (encoding === 'ascii') { + return len; + } + + const ops = getEncodingOps(encoding); + if (ops === undefined) { + // TODO (ronag): Makes more sense to throw here. + // throw new ERR_UNKNOWN_ENCODING(encoding); + return byteLengthUtf8(string); + } + + return ops.byteLength(string); +} + +Buffer.byteLength = byteLength; + +// For backwards compatibility. +ObjectDefineProperty(Buffer.prototype, 'parent', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.buffer; + }, +}); +ObjectDefineProperty(Buffer.prototype, 'offset', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.byteOffset; + }, +}); + +Buffer.prototype.copy = + function copy(target, targetStart, sourceStart, sourceEnd) { + return copyImpl(this, target, targetStart, sourceStart, sourceEnd); + }; + +// No need to verify that "buf.length <= MAX_UINT32" since it's a read-only +// property of a typed array. +// This behaves neither like String nor Uint8Array in that we set start/end +// to their upper/lower bounds if the value passed is out of range. 
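+// For example, buf.toString('utf8', -10, buf.length + 10) is clamped to buf.toString('utf8', 0, buf.length) instead of throwing.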
+Buffer.prototype.toString = function toString(encoding, start, end) { + if (arguments.length === 0) { + return this.utf8Slice(0, this.length); + } + + const len = this.length; + + if (start <= 0) + start = 0; + else if (start >= len) + return ''; + else + start = MathTrunc(start) || 0; + + if (end === undefined || end > len) + end = len; + else + end = MathTrunc(end) || 0; + + if (end <= start) + return ''; + + if (encoding === undefined) + return this.utf8Slice(start, end); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + + return ops.slice(this, start, end); +}; + +Buffer.prototype.equals = function equals(otherBuffer) { + if (!isUint8Array(otherBuffer)) { + throw new ERR_INVALID_ARG_TYPE( + 'otherBuffer', ['Buffer', 'Uint8Array'], otherBuffer); + } + + if (this === otherBuffer) + return true; + const len = TypedArrayPrototypeGetByteLength(this); + if (len !== TypedArrayPrototypeGetByteLength(otherBuffer)) + return false; + + return len === 0 || _compare(this, otherBuffer) === 0; +}; + +let INSPECT_MAX_BYTES = 50; +// Override how buffers are presented by util.inspect(). +Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) { + const max = INSPECT_MAX_BYTES; + const actualMax = MathMin(max, this.length); + const remaining = this.length - max; + let str = StringPrototypeTrim(RegExpPrototypeSymbolReplace( + /(.{2})/g, this.hexSlice(0, actualMax), '$1 ')); + if (remaining > 0) + str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; + // Inspect special properties as well, if possible. + if (ctx) { + let extras = false; + const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; + const obj = { __proto__: null }; + ArrayPrototypeForEach(getOwnNonIndexProperties(this, filter), + (key) => { + extras = true; + obj[key] = this[key]; + }); + if (extras) { + if (this.length !== 0) + str += ', '; + // '[Object: null prototype] {'.length === 26 + // This is guarded with a test. + str += StringPrototypeSlice(utilInspect(obj, { + ...ctx, + breakLength: Infinity, + compact: true, + }), 27, -2); + } + } + let constructorName = 'Buffer'; + try { + const { constructor } = this; + if (typeof constructor === 'function' && ObjectPrototypeHasOwnProperty(constructor, 'name')) { + constructorName = constructor.name; + } + } catch { /* Ignore error and use default name */ } + return `<${constructorName} ${str}>`; +}; +Buffer.prototype.inspect = Buffer.prototype[customInspectSymbol]; + +Buffer.prototype.compare = function compare(target, + targetStart, + targetEnd, + sourceStart, + sourceEnd) { + if (!isUint8Array(target)) { + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + } + if (arguments.length === 1) + return _compare(this, target); + + if (targetStart === undefined) + targetStart = 0; + else + validateOffset(targetStart, 'targetStart'); + + if (targetEnd === undefined) + targetEnd = target.length; + else + validateOffset(targetEnd, 'targetEnd', 0, target.length); + + if (sourceStart === undefined) + sourceStart = 0; + else + validateOffset(sourceStart, 'sourceStart'); + + if (sourceEnd === undefined) + sourceEnd = this.length; + else + validateOffset(sourceEnd, 'sourceEnd', 0, this.length); + + if (sourceStart >= sourceEnd) + return (targetStart >= targetEnd ? 
0 : -1); + if (targetStart >= targetEnd) + return 1; + + return compareOffset(this, target, targetStart, sourceStart, targetEnd, + sourceEnd); +}; + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. +// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant if val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) { + validateBuffer(buffer); + + if (typeof byteOffset === 'string') { + encoding = byteOffset; + byteOffset = undefined; + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff; + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000; + } + // Coerce to Number. Values like null and [] become 0. + byteOffset = +byteOffset; + // If the offset is undefined, "foo", {}, coerces to NaN, search whole buffer. + if (NumberIsNaN(byteOffset)) { + byteOffset = dir ? 0 : (buffer.length || buffer.byteLength); + } + dir = !!dir; // Cast to bool. + + if (typeof val === 'number') + return indexOfNumber(buffer, val >>> 0, byteOffset, dir); + + let ops; + if (encoding === undefined) + ops = encodingOps.utf8; + else + ops = getEncodingOps(encoding); + + if (typeof val === 'string') { + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.indexOf(buffer, val, byteOffset, dir); + } + + if (isUint8Array(val)) { + const encodingVal = + (ops === undefined ? encodingsMap.utf8 : ops.encodingVal); + return indexOfBuffer(buffer, val, byteOffset, encodingVal, dir); + } + + throw new ERR_INVALID_ARG_TYPE( + 'value', ['number', 'string', 'Buffer', 'Uint8Array'], val, + ); +} + +Buffer.prototype.indexOf = function indexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true); +}; + +Buffer.prototype.lastIndexOf = function lastIndexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false); +}; + +Buffer.prototype.includes = function includes(val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1; +}; + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill(value, offset, end, encoding) { + return _fill(this, value, offset, end, encoding); +}; + +function _fill(buf, value, offset, end, encoding) { + if (typeof value === 'string') { + if (offset === undefined || typeof offset === 'string') { + encoding = offset; + offset = 0; + end = buf.length; + } else if (typeof end === 'string') { + encoding = end; + end = buf.length; + } + + const normalizedEncoding = normalizeEncoding(encoding); + if (normalizedEncoding === undefined) { + validateString(encoding, 'encoding'); + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + if (value.length === 0) { + // If value === '' default to zero. + value = 0; + } else if (value.length === 1) { + // Fast path: If `value` fits into a single byte, use that numeric value. 
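+      // Editor's illustration of the fast path below: buf.fill('a')
+      // resolves to buf.fill(0x61), since 'a' is a single byte in both
+      // utf8 and latin1.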
+ if (normalizedEncoding === 'utf8') { + const code = StringPrototypeCharCodeAt(value, 0); + if (code < 128) { + value = code; + } + } else if (normalizedEncoding === 'latin1') { + value = StringPrototypeCharCodeAt(value, 0); + } + } + } else { + encoding = undefined; + } + + if (offset === undefined) { + offset = 0; + end = buf.length; + } else { + validateOffset(offset, 'offset'); + // Invalid ranges are not set to a default, so can range check early. + if (end === undefined) { + end = buf.length; + } else { + validateOffset(end, 'end', 0, buf.length); + } + if (offset >= end) + return buf; + } + + + if (typeof value === 'number') { + // OOB check + const byteLen = TypedArrayPrototypeGetByteLength(buf); + const fillLength = end - offset; + if (offset > end || fillLength + offset > byteLen) + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + + TypedArrayPrototypeFill(buf, value, offset, end); + } else { + const res = bindingFill(buf, value, offset, end, encoding); + if (res < 0) { + if (res === -1) + throw new ERR_INVALID_ARG_VALUE('value', value); + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + } + } + + return buf; +} + +Buffer.prototype.write = function write(string, offset, length, encoding) { + // Buffer#write(string); + if (offset === undefined) { + return this.utf8Write(string, 0, this.length); + } + // Buffer#write(string, encoding) + if (length === undefined && typeof offset === 'string') { + encoding = offset; + length = this.length; + offset = 0; + + // Buffer#write(string, offset[, length][, encoding]) + } else { + validateOffset(offset, 'offset', 0, this.length); + + const remaining = this.length - offset; + + if (length === undefined) { + length = remaining; + } else if (typeof length === 'string') { + encoding = length; + length = remaining; + } else { + validateOffset(length, 'length', 0, this.length); + if (length > remaining) + length = remaining; + } + } + + if (!encoding || encoding === 'utf8') + return this.utf8Write(string, offset, length); + if (encoding === 'ascii') + return this.asciiWrite(string, offset, length); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.write(this, string, offset, length); +}; + +Buffer.prototype.toJSON = function toJSON() { + if (this.length > 0) { + const data = new Array(this.length); + for (let i = 0; i < this.length; ++i) + data[i] = this[i]; + return { type: 'Buffer', data }; + } + return { type: 'Buffer', data: [] }; +}; + +function adjustOffset(offset, length) { + // Use Math.trunc() to convert offset to an integer value that can be larger + // than an Int32. Hence, don't use offset | 0 or similar techniques. + offset = MathTrunc(offset); + if (offset === 0) { + return 0; + } + if (offset < 0) { + offset += length; + return offset > 0 ? offset : 0; + } + if (offset < length) { + return offset; + } + return NumberIsNaN(offset) ? 0 : length; +} + +Buffer.prototype.subarray = function subarray(start, end) { + const srcLength = this.length; + start = adjustOffset(start, srcLength); + end = end !== undefined ? adjustOffset(end, srcLength) : srcLength; + const newLength = end > start ? 
end - start : 0; + return new FastBuffer(this.buffer, this.byteOffset + start, newLength); +}; + +Buffer.prototype.slice = function slice(start, end) { + return this.subarray(start, end); +}; + +function swap(b, n, m) { + const i = b[n]; + b[n] = b[m]; + b[m] = i; +} + +Buffer.prototype.swap16 = function swap16() { + // For Buffer.length < 128, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 2 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('16-bits'); + if (len < 128) { + for (let i = 0; i < len; i += 2) + swap(this, i, i + 1); + return this; + } + return _swap16(this); +}; + +Buffer.prototype.swap32 = function swap32() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 4 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('32-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 4) { + swap(this, i, i + 3); + swap(this, i + 1, i + 2); + } + return this; + } + return _swap32(this); +}; + +Buffer.prototype.swap64 = function swap64() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 8 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('64-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 8) { + swap(this, i, i + 7); + swap(this, i + 1, i + 6); + swap(this, i + 2, i + 5); + swap(this, i + 3, i + 4); + } + return this; + } + return _swap64(this); +}; + +Buffer.prototype.toLocaleString = Buffer.prototype.toString; + +let transcode; +if (internalBinding('config').hasIntl) { + const { + icuErrName, + transcode: _transcode, + } = internalBinding('icu'); + + // Transcodes the Buffer from one encoding to another, returning a new + // Buffer instance. + transcode = function transcode(source, fromEncoding, toEncoding) { + if (!isUint8Array(source)) { + throw new ERR_INVALID_ARG_TYPE('source', + ['Buffer', 'Uint8Array'], source); + } + if (source.length === 0) return Buffer.alloc(0); + + fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding; + toEncoding = normalizeEncoding(toEncoding) || toEncoding; + const result = _transcode(source, fromEncoding, toEncoding); + if (typeof result !== 'number') + return result; + + const code = icuErrName(result); + const err = genericNodeError( + `Unable to transcode Buffer [${code}]`, + { code: code, errno: result }, + ); + throw err; + }; +} + +function btoa(input) { + // The implementation here has not been performance optimized in any way and + // should not be. + // Refs: https://github.com/nodejs/node/pull/38433#issuecomment-828426932 + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + const result = _btoa(`${input}`); + if (result === -1) { + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + } + return result; +} + +function atob(input) { + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + + const result = _atob(`${input}`); + + switch (result) { + case -2: // Invalid character + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + case -1: // Single character remained + throw lazyDOMException( + 'The string to be decoded is not correctly encoded.', + 'InvalidCharacterError'); + case -3: // Possible overflow + // TODO(@anonrig): Throw correct error in here. 
+ throw lazyDOMException('The input causes overflow.', 'InvalidCharacterError'); + default: + return result; + } +} + +function isUtf8(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsUtf8(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +function isAscii(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsAscii(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +module.exports = { + Buffer, + SlowBuffer: deprecate( + SlowBuffer, + 'SlowBuffer() is deprecated. Please use Buffer.allocUnsafeSlow()', + 'DEP0030'), + transcode, + isUtf8, + isAscii, + + // Legacy + kMaxLength, + kStringMaxLength, + btoa, + atob, +}; + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + INSPECT_MAX_BYTES: { + __proto__: null, + configurable: true, + enumerable: true, + get() { return INSPECT_MAX_BYTES; }, + set(val) { + validateNumber(val, 'INSPECT_MAX_BYTES', 0); + INSPECT_MAX_BYTES = val; + }, + }, +}); + +defineLazyProperties( + module.exports, + 'internal/blob', + ['Blob', 'resolveObjectURL'], +); +defineLazyProperties( + module.exports, + 'internal/file', + ['File'], +); \ No newline at end of file diff --git a/.codesandbox/node/child_process.js b/.codesandbox/node/child_process.js new file mode 100644 index 0000000000..f531ed5117 --- /dev/null +++ b/.codesandbox/node/child_process.js @@ -0,0 +1,1021 @@ +"use strict"; + +const { + ArrayIsArray, + ArrayPrototypeFilter, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeLastIndexOf, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSlice, + ArrayPrototypeSort, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + ObjectAssign, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + PromiseWithResolvers, + RegExpPrototypeExec, + SafeSet, + StringPrototypeIncludes, + StringPrototypeSlice, + StringPrototypeToUpperCase, + SymbolDispose, +} = primordials; + +const { + assignFunctionName, + convertToValidSignal, + getSystemErrorName, + kEmptyObject, + promisify, +} = require("internal/util"); +const { isArrayBufferView } = require("internal/util/types"); +let debug = require("internal/util/debuglog").debuglog( + "child_process", + (fn) => { + debug = fn; + } +); +const { Buffer } = require("buffer"); +const { Pipe, constants: PipeConstants } = internalBinding("pipe_wrap"); + +const { + AbortError, + codes: { + ERR_CHILD_PROCESS_IPC_REQUIRED, + ERR_CHILD_PROCESS_STDIO_MAXBUFFER, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + }, + genericNodeError, +} = require("internal/errors"); +const { clearTimeout, setTimeout } = require("timers"); +const { getValidatedPath } = require("internal/fs/utils"); +const { + validateAbortSignal, + validateArray, + validateBoolean, + validateFunction, + validateInteger, + validateInt32, + validateNumber, + validateObject, + validateString, +} = require("internal/validators"); +const child_process = require("internal/child_process"); +const { getValidStdio, setupChannel, ChildProcess, stdioStringToArray } = + child_process; + +const MAX_BUFFER = 1024 * 1024; + +const permission = require("internal/process/permission"); + +const isZOS = process.platform === "os390"; +let addAbortListener; + +/** + * Spawns a new Node.js process + fork. 
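+ *
+ * Example (editor's sketch, not part of the upstream doc comment):
+ *   const child = fork('./worker.js', ['--flag'], { silent: true });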
+ * @param {string|URL} modulePath + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * detached?: boolean; + * env?: Record; + * execPath?: string; + * execArgv?: string[]; + * gid?: number; + * serialization?: string; + * signal?: AbortSignal; + * killSignal?: string | number; + * silent?: boolean; + * stdio?: Array | string; + * uid?: number; + * windowsVerbatimArguments?: boolean; + * timeout?: number; + * }} [options] + * @returns {ChildProcess} + */ +function fork(modulePath, args = [], options) { + modulePath = getValidatedPath(modulePath, "modulePath"); + + // Get options and args arguments. + let execArgv; + + if (args == null) { + args = []; + } else if (typeof args === "object" && !ArrayIsArray(args)) { + options = args; + args = []; + } else { + validateArray(args, "args"); + } + + if (options != null) { + validateObject(options, "options"); + } + options = { __proto__: null, ...options, shell: false }; + options.execPath ||= process.execPath; + validateArgumentNullCheck(options.execPath, "options.execPath"); + + // Prepare arguments for fork: + execArgv = options.execArgv || process.execArgv; + validateArgumentsNullCheck(execArgv, "options.execArgv"); + + if (execArgv === process.execArgv && process._eval != null) { + const index = ArrayPrototypeLastIndexOf(execArgv, process._eval); + if (index > 0) { + // Remove the -e switch to avoid fork bombing ourselves. + execArgv = ArrayPrototypeSlice(execArgv); + ArrayPrototypeSplice(execArgv, index - 1, 2); + } + } + + args = [...execArgv, modulePath, ...args]; + + if (typeof options.stdio === "string") { + options.stdio = stdioStringToArray(options.stdio, "ipc"); + } else if (!ArrayIsArray(options.stdio)) { + // Use a separate fd=3 for the IPC channel. Inherit stdin, stdout, + // and stderr from the parent if silent isn't set. + options.stdio = stdioStringToArray( + options.silent ? "pipe" : "inherit", + "ipc" + ); + } else if (!ArrayPrototypeIncludes(options.stdio, "ipc")) { + throw new ERR_CHILD_PROCESS_IPC_REQUIRED("options.stdio"); + } + + return spawn(options.execPath, args, options); +} + +function _forkChild(fd, serializationMode) { + // set process.send() + const p = new Pipe(PipeConstants.IPC); + p.open(fd); + p.unref(); + const control = setupChannel(process, p, serializationMode); + process.on("newListener", function onNewListener(name) { + if (name === "message" || name === "disconnect") control.refCounted(); + }); + process.on("removeListener", function onRemoveListener(name) { + if (name === "message" || name === "disconnect") control.unrefCounted(); + }); +} + +function normalizeExecArgs(command, options, callback) { + validateString(command, "command"); + validateArgumentNullCheck(command, "command"); + + if (typeof options === "function") { + callback = options; + options = undefined; + } + + // Make a shallow copy so we don't clobber the user's options object. + options = { __proto__: null, ...options }; + options.shell = typeof options.shell === "string" ? options.shell : true; + + return { + file: command, + options: options, + callback: callback, + }; +} + +/** + * Spawns a shell executing the given command. 
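+ *
+ * Example (editor's sketch, not part of the upstream doc comment):
+ *   exec('echo hello', (error, stdout, stderr) => console.log(stdout));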
+ * @param {string} command + * @param {{ + * cmd?: string; + * env?: Record; + * encoding?: string; + * shell?: string; + * signal?: AbortSignal; + * timeout?: number; + * maxBuffer?: number; + * killSignal?: string | number; + * uid?: number; + * gid?: number; + * windowsHide?: boolean; + * }} [options] + * @param {( + * error?: Error, + * stdout?: string | Buffer, + * stderr?: string | Buffer + * ) => any} [callback] + * @returns {ChildProcess} + */ +function exec(command, options, callback) { + const opts = normalizeExecArgs(command, options, callback); + return module.exports.execFile(opts.file, opts.options, opts.callback); +} + +const customPromiseExecFunction = (orig) => { + return assignFunctionName(orig.name, function (...args) { + const { promise, resolve, reject } = PromiseWithResolvers(); + + promise.child = orig(...args, (err, stdout, stderr) => { + if (err !== null) { + err.stdout = stdout; + err.stderr = stderr; + reject(err); + } else { + resolve({ stdout, stderr }); + } + }); + + return promise; + }); +}; + +ObjectDefineProperty(exec, promisify.custom, { + __proto__: null, + enumerable: false, + value: customPromiseExecFunction(exec), +}); + +function normalizeExecFileArgs(file, args, options, callback) { + if (ArrayIsArray(args)) { + args = ArrayPrototypeSlice(args); + } else if (args != null && typeof args === "object") { + callback = options; + options = args; + args = null; + } else if (typeof args === "function") { + callback = args; + options = null; + args = null; + } + + args ??= []; + + if (typeof options === "function") { + callback = options; + } else if (options != null) { + validateObject(options, "options"); + } + + options ??= kEmptyObject; + + if (callback != null) { + validateFunction(callback, "callback"); + } + + // Validate argv0, if present. + if (options.argv0 != null) { + validateString(options.argv0, "options.argv0"); + validateArgumentNullCheck(options.argv0, "options.argv0"); + } + + return { file, args, options, callback }; +} + +/** + * Spawns the specified file as a shell. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * env?: Record; + * encoding?: string; + * timeout?: number; + * maxBuffer?: number; + * killSignal?: string | number; + * uid?: number; + * gid?: number; + * windowsHide?: boolean; + * windowsVerbatimArguments?: boolean; + * shell?: boolean | string; + * signal?: AbortSignal; + * }} [options] + * @param {( + * error?: Error, + * stdout?: string | Buffer, + * stderr?: string | Buffer + * ) => any} [callback] + * @returns {ChildProcess} + */ +function execFile(file, args, options, callback) { + ({ file, args, options, callback } = normalizeExecFileArgs( + file, + args, + options, + callback + )); + + options = { + __proto__: null, + encoding: "utf8", + timeout: 0, + maxBuffer: MAX_BUFFER, + killSignal: "SIGTERM", + cwd: null, + env: null, + shell: false, + ...options, + }; + + // Validate the timeout, if present. + validateTimeout(options.timeout); + + // Validate maxBuffer, if present. 
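+  // (Editor's note: maxBuffer defaults to MAX_BUFFER, 1024 * 1024 bytes;
+  // passing Infinity disables the length accounting in the data handlers
+  // below.)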
+ validateMaxBuffer(options.maxBuffer); + + options.killSignal = sanitizeKillSignal(options.killSignal); + + const child = spawn(file, args, { + cwd: options.cwd, + env: options.env, + gid: options.gid, + shell: options.shell, + signal: options.signal, + uid: options.uid, + windowsHide: !!options.windowsHide, + windowsVerbatimArguments: !!options.windowsVerbatimArguments, + }); + + let encoding; + const _stdout = []; + const _stderr = []; + if (options.encoding !== "buffer" && Buffer.isEncoding(options.encoding)) { + encoding = options.encoding; + } else { + encoding = null; + } + let stdoutLen = 0; + let stderrLen = 0; + let killed = false; + let exited = false; + let timeoutId; + + let ex = null; + + let cmd = file; + + function exithandler(code, signal) { + if (exited) return; + exited = true; + + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (!callback) return; + + // merge chunks + let stdout; + let stderr; + if (encoding || child.stdout?.readableEncoding) { + stdout = ArrayPrototypeJoin(_stdout, ""); + } else { + stdout = Buffer.concat(_stdout); + } + if (encoding || child.stderr?.readableEncoding) { + stderr = ArrayPrototypeJoin(_stderr, ""); + } else { + stderr = Buffer.concat(_stderr); + } + + if (!ex && code === 0 && signal === null) { + callback(null, stdout, stderr); + return; + } + + if (args?.length) cmd += ` ${ArrayPrototypeJoin(args, " ")}`; + + ex ||= genericNodeError(`Command failed: ${cmd}\n${stderr}`, { + code: code < 0 ? getSystemErrorName(code) : code, + killed: child.killed || killed, + signal: signal, + }); + + ex.cmd = cmd; + callback(ex, stdout, stderr); + } + + function errorhandler(e) { + ex = e; + + if (child.stdout) child.stdout.destroy(); + + if (child.stderr) child.stderr.destroy(); + + exithandler(); + } + + function kill() { + if (child.stdout) child.stdout.destroy(); + + if (child.stderr) child.stderr.destroy(); + + killed = true; + try { + child.kill(options.killSignal); + } catch (e) { + ex = e; + exithandler(); + } + } + + if (options.timeout > 0) { + timeoutId = setTimeout(function delayedKill() { + kill(); + timeoutId = null; + }, options.timeout); + } + + if (child.stdout) { + if (encoding) child.stdout.setEncoding(encoding); + + child.stdout.on("data", function onChildStdout(chunk) { + // Do not need to count the length + if (options.maxBuffer === Infinity) { + ArrayPrototypePush(_stdout, chunk); + return; + } + const encoding = child.stdout.readableEncoding; + const length = encoding + ? Buffer.byteLength(chunk, encoding) + : chunk.length; + const slice = encoding + ? StringPrototypeSlice + : (buf, ...args) => buf.slice(...args); + stdoutLen += length; + + if (stdoutLen > options.maxBuffer) { + const truncatedLen = options.maxBuffer - (stdoutLen - length); + ArrayPrototypePush(_stdout, slice(chunk, 0, truncatedLen)); + + ex = new ERR_CHILD_PROCESS_STDIO_MAXBUFFER("stdout"); + kill(); + } else { + ArrayPrototypePush(_stdout, chunk); + } + }); + } + + if (child.stderr) { + if (encoding) child.stderr.setEncoding(encoding); + + child.stderr.on("data", function onChildStderr(chunk) { + // Do not need to count the length + if (options.maxBuffer === Infinity) { + ArrayPrototypePush(_stderr, chunk); + return; + } + const encoding = child.stderr.readableEncoding; + const length = encoding + ? 
Buffer.byteLength(chunk, encoding) + : chunk.length; + stderrLen += length; + + if (stderrLen > options.maxBuffer) { + const truncatedLen = options.maxBuffer - (stderrLen - length); + ArrayPrototypePush(_stderr, chunk.slice(0, truncatedLen)); + + ex = new ERR_CHILD_PROCESS_STDIO_MAXBUFFER("stderr"); + kill(); + } else { + ArrayPrototypePush(_stderr, chunk); + } + }); + } + + child.addListener("close", exithandler); + child.addListener("error", errorhandler); + + return child; +} + +ObjectDefineProperty(execFile, promisify.custom, { + __proto__: null, + enumerable: false, + value: customPromiseExecFunction(execFile), +}); + +function copyProcessEnvToEnv(env, name, optionEnv) { + if ( + process.env[name] && + (!optionEnv || !ObjectPrototypeHasOwnProperty(optionEnv, name)) + ) { + env[name] = process.env[name]; + } +} + +let permissionModelFlagsToCopy; + +function getPermissionModelFlagsToCopy() { + if (permissionModelFlagsToCopy === undefined) { + permissionModelFlagsToCopy = [ + ...permission.availableFlags(), + "--permission", + ]; + } + return permissionModelFlagsToCopy; +} + +function copyPermissionModelFlagsToEnv(env, key, args) { + // Do not override if permission was already passed to file + if ( + args.includes("--permission") || + (env[key] && env[key].indexOf("--permission") !== -1) + ) { + return; + } + + const flagsToCopy = getPermissionModelFlagsToCopy(); + for (const arg of process.execArgv) { + for (const flag of flagsToCopy) { + if (arg.startsWith(flag)) { + env[key] = `${env[key] ? env[key] + " " + arg : arg}`; + } + } + } +} + +let emittedDEP0190Already = false; +function normalizeSpawnArguments(file, args, options) { + validateString(file, "file"); + validateArgumentNullCheck(file, "file"); + + if (file.length === 0) + throw new ERR_INVALID_ARG_VALUE("file", file, "cannot be empty"); + + if (ArrayIsArray(args)) { + args = ArrayPrototypeSlice(args); + } else if (args == null) { + args = []; + } else if (typeof args !== "object") { + throw new ERR_INVALID_ARG_TYPE("args", "object", args); + } else { + options = args; + args = []; + } + + validateArgumentsNullCheck(args, "args"); + + if (options === undefined) options = kEmptyObject; + else validateObject(options, "options"); + + options = { __proto__: null, ...options }; + let cwd = options.cwd; + + // Validate the cwd, if present. + if (cwd != null) { + cwd = getValidatedPath(cwd, "options.cwd"); + } + + // Validate detached, if present. + if (options.detached != null) { + validateBoolean(options.detached, "options.detached"); + } + + // Validate the uid, if present. + if (options.uid != null) { + validateInt32(options.uid, "options.uid"); + } + + // Validate the gid, if present. + if (options.gid != null) { + validateInt32(options.gid, "options.gid"); + } + + // Validate the shell, if present. + if ( + options.shell != null && + typeof options.shell !== "boolean" && + typeof options.shell !== "string" + ) { + throw new ERR_INVALID_ARG_TYPE( + "options.shell", + ["boolean", "string"], + options.shell + ); + } + + // Validate argv0, if present. + if (options.argv0 != null) { + validateString(options.argv0, "options.argv0"); + validateArgumentNullCheck(options.argv0, "options.argv0"); + } + + // Validate windowsHide, if present. + if (options.windowsHide != null) { + validateBoolean(options.windowsHide, "options.windowsHide"); + } + + // Validate windowsVerbatimArguments, if present. 
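+  // (Editor's note: when a cmd.exe-style shell is selected below, this flag
+  // is forced to true so the quoted command line is passed through verbatim.)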
+ let { windowsVerbatimArguments } = options; + if (windowsVerbatimArguments != null) { + validateBoolean( + windowsVerbatimArguments, + "options.windowsVerbatimArguments" + ); + } + + if (options.shell) { + validateArgumentNullCheck(options.shell, "options.shell"); + if (args.length > 0 && !emittedDEP0190Already) { + process.emitWarning( + "Passing args to a child process with shell option true can lead to security " + + "vulnerabilities, as the arguments are not escaped, only concatenated.", + "DeprecationWarning", + "DEP0190" + ); + emittedDEP0190Already = true; + } + + const command = + args.length > 0 ? `${file} ${ArrayPrototypeJoin(args, " ")}` : file; + // Set the shell, switches, and commands. + if (process.platform === "win32") { + if (typeof options.shell === "string") file = options.shell; + else file = process.env.comspec || "cmd.exe"; + // '/d /s /c' is used only for cmd.exe. + if (RegExpPrototypeExec(/^(?:.*\\)?cmd(?:\.exe)?$/i, file) !== null) { + args = ["/d", "/s", "/c", `"${command}"`]; + windowsVerbatimArguments = true; + } else { + args = ["-c", command]; + } + } else { + if (typeof options.shell === "string") file = options.shell; + else if (process.platform === "android") file = "/system/bin/sh"; + else file = "/bin/sh"; + args = ["-c", command]; + } + } + + if (typeof options.argv0 === "string") { + ArrayPrototypeUnshift(args, options.argv0); + } else { + ArrayPrototypeUnshift(args, file); + } + + // Shallow copy to guarantee changes won't impact process.env + const env = options.env || { ...process.env }; + const envPairs = []; + + // process.env.NODE_V8_COVERAGE always propagates, making it possible to + // collect coverage for programs that spawn with white-listed environment. + copyProcessEnvToEnv(env, "NODE_V8_COVERAGE", options.env); + + if (isZOS) { + // The following environment variables must always propagate if set. + copyProcessEnvToEnv(env, "_BPXK_AUTOCVT", options.env); + copyProcessEnvToEnv(env, "_CEE_RUNOPTS", options.env); + copyProcessEnvToEnv(env, "_TAG_REDIR_ERR", options.env); + copyProcessEnvToEnv(env, "_TAG_REDIR_IN", options.env); + copyProcessEnvToEnv(env, "_TAG_REDIR_OUT", options.env); + copyProcessEnvToEnv(env, "STEPLIB", options.env); + copyProcessEnvToEnv(env, "LIBPATH", options.env); + copyProcessEnvToEnv(env, "_EDC_SIG_DFLT", options.env); + copyProcessEnvToEnv(env, "_EDC_SUSV3", options.env); + } + + if (permission.isEnabled()) { + copyPermissionModelFlagsToEnv(env, "NODE_OPTIONS", args); + } + + let envKeys = []; + // Prototype values are intentionally included. + for (const key in env) { + ArrayPrototypePush(envKeys, key); + } + + if (process.platform === "win32") { + // On Windows env keys are case insensitive. Filter out duplicates, + // keeping only the first one (in lexicographic order) + const sawKey = new SafeSet(); + envKeys = ArrayPrototypeFilter(ArrayPrototypeSort(envKeys), (key) => { + const uppercaseKey = StringPrototypeToUpperCase(key); + if (sawKey.has(uppercaseKey)) { + return false; + } + sawKey.add(uppercaseKey); + return true; + }); + } + + for (const key of envKeys) { + const value = env[key]; + if (value !== undefined) { + validateArgumentNullCheck(key, `options.env['${key}']`); + validateArgumentNullCheck(value, `options.env['${key}']`); + ArrayPrototypePush(envPairs, `${key}=${value}`); + } + } + + return { + // Make a shallow copy so we don't clobber the user's options object. 
+ __proto__: null, + ...options, + args, + cwd, + detached: !!options.detached, + envPairs, + file, + windowsHide: !!options.windowsHide, + windowsVerbatimArguments: !!windowsVerbatimArguments, + }; +} + +function abortChildProcess(child, killSignal, reason) { + if (!child) return; + try { + if (child.kill(killSignal)) { + child.emit("error", new AbortError(undefined, { cause: reason })); + } + } catch (err) { + child.emit("error", err); + } +} + +/** + * Spawns a new process using the given `file`. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * env?: Record; + * argv0?: string; + * stdio?: Array | string; + * detached?: boolean; + * uid?: number; + * gid?: number; + * serialization?: string; + * shell?: boolean | string; + * windowsVerbatimArguments?: boolean; + * windowsHide?: boolean; + * signal?: AbortSignal; + * timeout?: number; + * killSignal?: string | number; + * }} [options] + * @returns {ChildProcess} + */ +function spawn(file, args, options) { + options = normalizeSpawnArguments(file, args, options); + validateTimeout(options.timeout); + validateAbortSignal(options.signal, "options.signal"); + const killSignal = sanitizeKillSignal(options.killSignal); + const child = new ChildProcess(); + + debug("spawn", options); + child.spawn(options); + + if (options.timeout > 0) { + let timeoutId = setTimeout(() => { + if (timeoutId) { + try { + child.kill(killSignal); + } catch (err) { + child.emit("error", err); + } + timeoutId = null; + } + }, options.timeout); + + child.once("exit", () => { + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } + }); + } + + if (options.signal) { + const signal = options.signal; + if (signal.aborted) { + process.nextTick(onAbortListener); + } else { + addAbortListener ??= + require("internal/events/abort_listener").addAbortListener; + const disposable = addAbortListener(signal, onAbortListener); + child.once("exit", disposable[SymbolDispose]); + } + + function onAbortListener() { + abortChildProcess(child, killSignal, options.signal.reason); + } + } + + return child; +} + +/** + * Spawns a new process synchronously using the given `file`. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * argv0?: string; + * stdio?: string | Array; + * env?: Record; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * shell?: boolean | string; + * windowsVerbatimArguments?: boolean; + * windowsHide?: boolean; + * }} [options] + * @returns {{ + * pid: number; + * output: Array; + * stdout: Buffer | string; + * stderr: Buffer | string; + * status: number | null; + * signal: string | null; + * error: Error; + * }} + */ +function spawnSync(file, args, options) { + options = { + __proto__: null, + maxBuffer: MAX_BUFFER, + ...normalizeSpawnArguments(file, args, options), + }; + + debug("spawnSync", options); + + // Validate the timeout, if present. + validateTimeout(options.timeout); + + // Validate maxBuffer, if present. + validateMaxBuffer(options.maxBuffer); + + // Validate and translate the kill signal, if present. 
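+  // (Editor's note: killSignal may be a name such as 'SIGTERM' or a numeric
+  // signal; any other non-null value is rejected by sanitizeKillSignal().)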
+ options.killSignal = sanitizeKillSignal(options.killSignal); + + options.stdio = getValidStdio(options.stdio || "pipe", true).stdio; + + if (options.input) { + const stdin = (options.stdio[0] = { ...options.stdio[0] }); + stdin.input = options.input; + } + + // We may want to pass data in on any given fd, ensure it is a valid buffer + for (let i = 0; i < options.stdio.length; i++) { + const input = options.stdio[i]?.input; + if (input != null) { + const pipe = (options.stdio[i] = { ...options.stdio[i] }); + if (isArrayBufferView(input)) { + pipe.input = input; + } else if (typeof input === "string") { + pipe.input = Buffer.from(input, options.encoding); + } else { + throw new ERR_INVALID_ARG_TYPE( + `options.stdio[${i}]`, + ["Buffer", "TypedArray", "DataView", "string"], + input + ); + } + } + } + + return child_process.spawnSync(options); +} + +function checkExecSyncError(ret, args, cmd) { + let err; + if (ret.error) { + err = ret.error; + ObjectAssign(err, ret); + } else if (ret.status !== 0) { + let msg = "Command failed: "; + msg += cmd || ArrayPrototypeJoin(args, " "); + if (ret.stderr && ret.stderr.length > 0) + msg += `\n${ret.stderr.toString()}`; + err = genericNodeError(msg, ret); + } + return err; +} + +/** + * Spawns a file as a shell synchronously. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * stdio?: string | Array; + * env?: Record; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * windowsHide?: boolean; + * shell?: boolean | string; + * }} [options] + * @returns {Buffer | string} + */ +function execFileSync(file, args, options) { + ({ file, args, options } = normalizeExecFileArgs(file, args, options)); + + const inheritStderr = !options.stdio; + const ret = spawnSync(file, args, options); + + if (inheritStderr && ret.stderr) process.stderr.write(ret.stderr); + + const errArgs = [options.argv0 || file]; + ArrayPrototypePushApply(errArgs, args); + const err = checkExecSyncError(ret, errArgs); + + if (err) throw err; + + return ret.stdout; +} + +/** + * Spawns a shell executing the given `command` synchronously. 
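+ *
+ * Example (editor's sketch, not part of the upstream doc comment):
+ *   const out = execSync('git rev-parse HEAD', { encoding: 'utf8' });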
+ * @param {string} command + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * stdio?: string | Array; + * env?: Record; + * shell?: string; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * windowsHide?: boolean; + * }} [options] + * @returns {Buffer | string} + */ +function execSync(command, options) { + const opts = normalizeExecArgs(command, options, null); + const inheritStderr = !opts.options.stdio; + + const ret = spawnSync(opts.file, opts.options); + + if (inheritStderr && ret.stderr) process.stderr.write(ret.stderr); + + const err = checkExecSyncError(ret, undefined, command); + + if (err) throw err; + + return ret.stdout; +} + +function validateArgumentNullCheck(arg, propName) { + if (typeof arg === "string" && StringPrototypeIncludes(arg, "\u0000")) { + throw new ERR_INVALID_ARG_VALUE( + propName, + arg, + "must be a string without null bytes" + ); + } +} + +function validateArgumentsNullCheck(args, propName) { + for (let i = 0; i < args.length; ++i) { + validateArgumentNullCheck(args[i], `${propName}[${i}]`); + } +} + +function validateTimeout(timeout) { + if (timeout != null) { + validateInteger(timeout, "timeout", 0); + } +} + +function validateMaxBuffer(maxBuffer) { + if (maxBuffer != null) { + validateNumber(maxBuffer, "options.maxBuffer", 0); + } +} + +function sanitizeKillSignal(killSignal) { + if (typeof killSignal === "string" || typeof killSignal === "number") { + return convertToValidSignal(killSignal); + } else if (killSignal != null) { + throw new ERR_INVALID_ARG_TYPE( + "options.killSignal", + ["string", "number"], + killSignal + ); + } +} + +module.exports = { + _forkChild, + ChildProcess, + exec, + execFile, + execFileSync, + execSync, + fork, + spawn, + spawnSync, +}; diff --git a/.codesandbox/node/cluster.js b/.codesandbox/node/cluster.js new file mode 100644 index 0000000000..6f3dc16892 --- /dev/null +++ b/.codesandbox/node/cluster.js @@ -0,0 +1,8 @@ +'use strict'; + +const { + ObjectPrototypeHasOwnProperty: ObjectHasOwn, +} = primordials; + +const childOrPrimary = ObjectHasOwn(process.env, 'NODE_UNIQUE_ID') ? 
'child' : 'primary'; +module.exports = require(`internal/cluster/${childOrPrimary}`); \ No newline at end of file diff --git a/.codesandbox/node/console.js b/.codesandbox/node/console.js new file mode 100644 index 0000000000..d896d2e320 --- /dev/null +++ b/.codesandbox/node/console.js @@ -0,0 +1,3 @@ +"use strict"; + +module.exports = require("internal/console/global"); diff --git a/.codesandbox/node/crypto.js b/.codesandbox/node/crypto.js new file mode 100644 index 0000000000..06bfebc7e1 --- /dev/null +++ b/.codesandbox/node/crypto.js @@ -0,0 +1,1365 @@ +'use strict'; + +const { + Array, + ArrayBufferIsView, + ArrayIsArray, + ArrayPrototypeForEach, + MathFloor, + MathMin, + MathTrunc, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + ObjectSetPrototypeOf, + RegExpPrototypeSymbolReplace, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + StringPrototypeToLowerCase, + StringPrototypeTrim, + SymbolSpecies, + SymbolToPrimitive, + TypedArrayPrototypeFill, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeSet, + TypedArrayPrototypeSlice, + Uint8Array, + Uint8ArrayPrototype, +} = primordials; + +const { + byteLengthUtf8, + compare: _compare, + compareOffset, + copy: _copy, + fill: bindingFill, + isAscii: bindingIsAscii, + isUtf8: bindingIsUtf8, + indexOfBuffer, + indexOfNumber, + indexOfString, + swap16: _swap16, + swap32: _swap32, + swap64: _swap64, + kMaxLength, + kStringMaxLength, + atob: _atob, + btoa: _btoa, +} = internalBinding('buffer'); +const { + constants: { + ALL_PROPERTIES, + ONLY_ENUMERABLE, + }, + getOwnNonIndexProperties, + isInsideNodeModules, +} = internalBinding('util'); +const { + customInspectSymbol, + lazyDOMException, + normalizeEncoding, + kIsEncodingSymbol, + defineLazyProperties, + encodingsMap, + deprecate, +} = require('internal/util'); +const { + isAnyArrayBuffer, + isArrayBufferView, + isUint8Array, + isTypedArray, +} = require('internal/util/types'); +const { + inspect: utilInspect, +} = require('internal/util/inspect'); + +const { + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_BUFFER_SIZE, + ERR_MISSING_ARGS, + ERR_OUT_OF_RANGE, + ERR_UNKNOWN_ENCODING, + }, + genericNodeError, +} = require('internal/errors'); +const { + validateArray, + validateBuffer, + validateInteger, + validateNumber, + validateString, +} = require('internal/validators'); +// Provide validateInteger() but with kMaxLength as the default maximum value. 
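+// (Editor's illustration: validateOffset(8, 'offset') passes, while negative
+// or non-integer offsets are rejected.)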
+const validateOffset = (value, name, min = 0, max = kMaxLength) => + validateInteger(value, name, min, max); + +const { + FastBuffer, + markAsUntransferable, + addBufferPrototypeMethods, + createUnsafeBuffer, +} = require('internal/buffer'); + +FastBuffer.prototype.constructor = Buffer; +Buffer.prototype = FastBuffer.prototype; +addBufferPrototypeMethods(Buffer.prototype); + +const constants = ObjectDefineProperties({}, { + MAX_LENGTH: { + __proto__: null, + value: kMaxLength, + writable: false, + enumerable: true, + }, + MAX_STRING_LENGTH: { + __proto__: null, + value: kStringMaxLength, + writable: false, + enumerable: true, + }, +}); + +Buffer.poolSize = 8 * 1024; +let poolSize, poolOffset, allocPool, allocBuffer; + +function createPool() { + poolSize = Buffer.poolSize; + allocBuffer = createUnsafeBuffer(poolSize); + allocPool = allocBuffer.buffer; + markAsUntransferable(allocPool); + poolOffset = 0; +} +createPool(); + +function alignPool() { + // Ensure aligned slices + if (poolOffset & 0x7) { + poolOffset |= 0x7; + poolOffset++; + } +} + +let bufferWarningAlreadyEmitted = false; +let nodeModulesCheckCounter = 0; +const bufferWarning = 'Buffer() is deprecated due to security and usability ' + + 'issues. Please use the Buffer.alloc(), ' + + 'Buffer.allocUnsafe(), or Buffer.from() methods instead.'; + +function showFlaggedDeprecation() { + if (bufferWarningAlreadyEmitted || + ++nodeModulesCheckCounter > 10000 || + (!require('internal/options').getOptionValue('--pending-deprecation') && + isInsideNodeModules(100, true))) { + // We don't emit a warning, because we either: + // - Already did so, or + // - Already checked too many times whether a call is coming + // from node_modules and want to stop slowing down things, or + // - We aren't running with `--pending-deprecation` enabled, + // and the code is inside `node_modules`. + // - We found node_modules in up to the topmost 100 frames, or + // there are more than 100 frames and we don't want to search anymore. + return; + } + + process.emitWarning(bufferWarning, 'DeprecationWarning', 'DEP0005'); + bufferWarningAlreadyEmitted = true; +} + +function toInteger(n, defaultVal) { + n = +n; + if (!NumberIsNaN(n) && + n >= NumberMIN_SAFE_INTEGER && + n <= NumberMAX_SAFE_INTEGER) { + return ((n % 1) === 0 ? n : MathFloor(n)); + } + return defaultVal; +} + +function copyImpl(source, target, targetStart, sourceStart, sourceEnd) { + if (!ArrayBufferIsView(source)) + throw new ERR_INVALID_ARG_TYPE('source', ['Buffer', 'Uint8Array'], source); + if (!ArrayBufferIsView(target)) + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + + if (targetStart === undefined) { + targetStart = 0; + } else { + targetStart = NumberIsInteger(targetStart) ? targetStart : toInteger(targetStart, 0); + if (targetStart < 0) + throw new ERR_OUT_OF_RANGE('targetStart', '>= 0', targetStart); + } + + if (sourceStart === undefined) { + sourceStart = 0; + } else { + sourceStart = NumberIsInteger(sourceStart) ? sourceStart : toInteger(sourceStart, 0); + if (sourceStart < 0 || sourceStart > source.byteLength) + throw new ERR_OUT_OF_RANGE('sourceStart', `>= 0 && <= ${source.byteLength}`, sourceStart); + } + + if (sourceEnd === undefined) { + sourceEnd = source.byteLength; + } else { + sourceEnd = NumberIsInteger(sourceEnd) ? 
sourceEnd : toInteger(sourceEnd, 0); + if (sourceEnd < 0) + throw new ERR_OUT_OF_RANGE('sourceEnd', '>= 0', sourceEnd); + } + + if (targetStart >= target.byteLength || sourceStart >= sourceEnd) + return 0; + + return _copyActual(source, target, targetStart, sourceStart, sourceEnd); +} + +function _copyActual(source, target, targetStart, sourceStart, sourceEnd) { + if (sourceEnd - sourceStart > target.byteLength - targetStart) + sourceEnd = sourceStart + target.byteLength - targetStart; + + let nb = sourceEnd - sourceStart; + const sourceLen = source.byteLength - sourceStart; + if (nb > sourceLen) + nb = sourceLen; + + if (nb <= 0) + return 0; + + _copy(source, target, targetStart, sourceStart, nb); + + return nb; +} + +/** + * The Buffer() constructor is deprecated in documentation and should not be + * used moving forward. Rather, developers should use one of the three new + * factory APIs: Buffer.from(), Buffer.allocUnsafe() or Buffer.alloc() based on + * their specific needs. There is no runtime deprecation because of the extent + * to which the Buffer constructor is used in the ecosystem currently -- a + * runtime deprecation would introduce too much breakage at this time. It's not + * likely that the Buffer constructors would ever actually be removed. + * Deprecation Code: DEP0005 + * @returns {Buffer} + */ +function Buffer(arg, encodingOrOffset, length) { + showFlaggedDeprecation(); + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new ERR_INVALID_ARG_TYPE('string', 'string', arg); + } + return Buffer.alloc(arg); + } + return Buffer.from(arg, encodingOrOffset, length); +} + +ObjectDefineProperty(Buffer, SymbolSpecies, { + __proto__: null, + enumerable: false, + configurable: true, + get() { return FastBuffer; }, +}); + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + * @param {any} value + * @param {BufferEncoding|number} encodingOrOffset + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.from = function from(value, encodingOrOffset, length) { + if (typeof value === 'string') + return fromString(value, encodingOrOffset); + + if (typeof value === 'object' && value !== null) { + if (isAnyArrayBuffer(value)) + return fromArrayBuffer(value, encodingOrOffset, length); + + const valueOf = value.valueOf && value.valueOf(); + if (valueOf != null && + valueOf !== value && + (typeof valueOf === 'string' || typeof valueOf === 'object')) { + return from(valueOf, encodingOrOffset, length); + } + + const b = fromObject(value); + if (b) + return b; + + if (typeof value[SymbolToPrimitive] === 'function') { + const primitive = value[SymbolToPrimitive]('string'); + if (typeof primitive === 'string') { + return fromString(primitive, encodingOrOffset); + } + } + } + + throw new ERR_INVALID_ARG_TYPE( + 'first argument', + ['string', 'Buffer', 'ArrayBuffer', 'Array', 'Array-like Object'], + value, + ); +}; + +/** + * Creates the Buffer as a copy of the underlying ArrayBuffer of the view + * rather than the contents of the view. 
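+ *
+ * Example (editor's sketch, not part of the upstream doc comment):
+ *   Buffer.copyBytesFrom(new Uint8Array([1, 2, 3]), 1); // <Buffer 02 03>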
+ * @param {TypedArray} view + * @param {number} [offset] + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.copyBytesFrom = function copyBytesFrom(view, offset, length) { + if (!isTypedArray(view)) { + throw new ERR_INVALID_ARG_TYPE('view', [ 'TypedArray' ], view); + } + + const viewLength = TypedArrayPrototypeGetLength(view); + if (viewLength === 0) { + return Buffer.alloc(0); + } + + if (offset !== undefined || length !== undefined) { + if (offset !== undefined) { + validateInteger(offset, 'offset', 0); + if (offset >= viewLength) return Buffer.alloc(0); + } else { + offset = 0; + } + let end; + if (length !== undefined) { + validateInteger(length, 'length', 0); + end = offset + length; + } else { + end = viewLength; + } + + view = TypedArrayPrototypeSlice(view, offset, end); + } + + return fromArrayLike(new Uint8Array( + TypedArrayPrototypeGetBuffer(view), + TypedArrayPrototypeGetByteOffset(view), + TypedArrayPrototypeGetByteLength(view))); +}; + +// Identical to the built-in %TypedArray%.of(), but avoids using the deprecated +// Buffer() constructor. Must use arrow function syntax to avoid automatically +// adding a `prototype` property and making the function a constructor. +// +// Refs: https://tc39.github.io/ecma262/#sec-%typedarray%.of +// Refs: https://esdiscuss.org/topic/isconstructor#content-11 +const of = (...items) => { + const newObj = createUnsafeBuffer(items.length); + for (let k = 0; k < items.length; k++) + newObj[k] = items[k]; + return newObj; +}; +Buffer.of = of; + +ObjectSetPrototypeOf(Buffer, Uint8Array); + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + * @returns {FastBuffer} + */ +Buffer.alloc = function alloc(size, fill, encoding) { + validateNumber(size, 'size', 0, kMaxLength); + if (fill !== undefined && fill !== 0 && size > 0) { + const buf = createUnsafeBuffer(size); + return _fill(buf, fill, 0, buf.length, encoding); + } + return new FastBuffer(size); +}; + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer + * instance. If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @returns {FastBuffer} + */ +Buffer.allocUnsafe = function allocUnsafe(size) { + validateNumber(size, 'size', 0, kMaxLength); + return allocate(size); +}; + +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled + * Buffer instance that is not allocated off the pre-initialized pool. + * If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @param {number} size + * @returns {FastBuffer|undefined} + */ +Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +}; + +// If --zero-fill-buffers command line argument is set, a zero-filled +// buffer is returned. 
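+// Editor's note: SlowBuffer(size) is the deprecated equivalent of
+// Buffer.allocUnsafeSlow(size); both allocate storage outside the shared
+// pre-allocated pool.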
+function SlowBuffer(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +} + +ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype); +ObjectSetPrototypeOf(SlowBuffer, Uint8Array); + +function allocate(size) { + if (size <= 0) { + return new FastBuffer(); + } + if (size < (Buffer.poolSize >>> 1)) { + if (size > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, size); + poolOffset += size; + alignPool(); + return b; + } + return createUnsafeBuffer(size); +} + +function fromStringFast(string, ops) { + const maxLength = Buffer.poolSize >>> 1; + + let length = string.length; // Min length + + if (length >= maxLength) + return createFromString(string, ops); + + length *= 4; // Max length (4 bytes per character) + + if (length >= maxLength) + length = ops.byteLength(string); // Actual length + + if (length >= maxLength) + return createFromString(string, ops, length); + + if (length > (poolSize - poolOffset)) + createPool(); + + const actual = ops.write(allocBuffer, string, poolOffset, length); + const b = new FastBuffer(allocPool, poolOffset, actual); + + poolOffset += actual; + alignPool(); + return b; +} + +function createFromString(string, ops, length = ops.byteLength(string)) { + const buf = Buffer.allocUnsafeSlow(length); + const actual = ops.write(buf, string, 0, length); + return actual < length ? new FastBuffer(buf.buffer, 0, actual) : buf; +} + +function fromString(string, encoding) { + let ops; + if (!encoding || encoding === 'utf8' || typeof encoding !== 'string') { + ops = encodingOps.utf8; + } else { + ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + return string.length === 0 ? new FastBuffer() : fromStringFast(string, ops); +} + +function fromArrayBuffer(obj, byteOffset, length) { + // Convert byteOffset to integer + if (byteOffset === undefined) { + byteOffset = 0; + } else { + byteOffset = +byteOffset; + if (NumberIsNaN(byteOffset)) + byteOffset = 0; + } + + const maxLength = obj.byteLength - byteOffset; + + if (maxLength < 0) + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + + if (length !== undefined) { + // Convert length to non-negative integer. 
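+    // (Editor's note: NaN and negative lengths become 0; a length larger
+    // than the remaining bytes throws ERR_BUFFER_OUT_OF_BOUNDS.)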
+ length = +length; + if (length > 0) { + if (length > maxLength) + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } else { + length = 0; + } + } + + return new FastBuffer(obj, byteOffset, length); +} + +function fromArrayLike(obj) { + if (obj.length <= 0) + return new FastBuffer(); + if (obj.length < (Buffer.poolSize >>> 1)) { + if (obj.length > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, obj.length); + TypedArrayPrototypeSet(b, obj, 0); + poolOffset += obj.length; + alignPool(); + return b; + } + return new FastBuffer(obj); +} + +function fromObject(obj) { + if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) { + if (typeof obj.length !== 'number') { + return new FastBuffer(); + } + return fromArrayLike(obj); + } + + if (obj.type === 'Buffer' && ArrayIsArray(obj.data)) { + return fromArrayLike(obj.data); + } +} + +// Static methods + +Buffer.isBuffer = function isBuffer(b) { + return b instanceof Buffer; +}; + +Buffer.compare = function compare(buf1, buf2) { + if (!isUint8Array(buf1)) { + throw new ERR_INVALID_ARG_TYPE('buf1', ['Buffer', 'Uint8Array'], buf1); + } + + if (!isUint8Array(buf2)) { + throw new ERR_INVALID_ARG_TYPE('buf2', ['Buffer', 'Uint8Array'], buf2); + } + + if (buf1 === buf2) { + return 0; + } + + return _compare(buf1, buf2); +}; + +Buffer.isEncoding = function isEncoding(encoding) { + return typeof encoding === 'string' && encoding.length !== 0 && + normalizeEncoding(encoding) !== undefined; +}; +Buffer[kIsEncodingSymbol] = Buffer.isEncoding; + +Buffer.concat = function concat(list, length) { + validateArray(list, 'list'); + + if (list.length === 0) + return new FastBuffer(); + + if (length === undefined) { + length = 0; + for (let i = 0; i < list.length; i++) { + if (list[i].length) { + length += list[i].length; + } + } + } else { + validateOffset(length, 'length'); + } + + const buffer = Buffer.allocUnsafe(length); + let pos = 0; + for (let i = 0; i < list.length; i++) { + const buf = list[i]; + if (!isUint8Array(buf)) { + // TODO(BridgeAR): This should not be of type ERR_INVALID_ARG_TYPE. + // Instead, find the proper error code for this. + throw new ERR_INVALID_ARG_TYPE( + `list[${i}]`, ['Buffer', 'Uint8Array'], list[i]); + } + pos += _copyActual(buf, buffer, pos, 0, buf.length); + } + + // Note: `length` is always equal to `buffer.length` at this point + if (pos < length) { + // Zero-fill the remaining bytes if the specified `length` was more than + // the actual total length, i.e. if we have some remaining allocated bytes + // there were not initialized. 
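+  // Editor's illustration: Buffer.concat([Buffer.from([1])], 4) copies one
+  // byte and zero-fills the rest, yielding <Buffer 01 00 00 00>.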
+ TypedArrayPrototypeFill(buffer, 0, pos, length); + } + + return buffer; +}; + +function base64ByteLength(str, bytes) { + // Handle padding + if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + + // Base64 ratio: 3/4 + return (bytes * 3) >>> 2; +} + +const encodingOps = { + utf8: { + encoding: 'utf8', + encodingVal: encodingsMap.utf8, + byteLength: byteLengthUtf8, + write: (buf, string, offset, len) => buf.utf8Write(string, offset, len), + slice: (buf, start, end) => buf.utf8Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf8, dir), + }, + ucs2: { + encoding: 'ucs2', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + utf16le: { + encoding: 'utf16le', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + latin1: { + encoding: 'latin1', + encodingVal: encodingsMap.latin1, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.latin1Write(string, offset, len), + slice: (buf, start, end) => buf.latin1Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.latin1, dir), + }, + ascii: { + encoding: 'ascii', + encodingVal: encodingsMap.ascii, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len), + slice: (buf, start, end) => buf.asciiSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.ascii), + byteOffset, + encodingsMap.ascii, + dir), + }, + base64: { + encoding: 'base64', + encodingVal: encodingsMap.base64, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => buf.base64Write(string, offset, len), + slice: (buf, start, end) => buf.base64Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64), + byteOffset, + encodingsMap.base64, + dir), + }, + base64url: { + encoding: 'base64url', + encodingVal: encodingsMap.base64url, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => + buf.base64urlWrite(string, offset, len), + slice: (buf, start, end) => buf.base64urlSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64url), + byteOffset, + encodingsMap.base64url, + dir), + }, + hex: { + encoding: 'hex', + encodingVal: encodingsMap.hex, + byteLength: (string) => string.length >>> 1, + write: (buf, string, offset, len) => buf.hexWrite(string, offset, len), + slice: (buf, start, end) => buf.hexSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.hex), + byteOffset, + encodingsMap.hex, + dir), + }, +}; +function getEncodingOps(encoding) { + encoding += ''; + switch (encoding.length) { + case 4: + 
if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + break; + case 5: + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + break; + case 7: + if (encoding === 'utf16le' || + StringPrototypeToLowerCase(encoding) === 'utf16le') + return encodingOps.utf16le; + break; + case 8: + if (encoding === 'utf-16le' || + StringPrototypeToLowerCase(encoding) === 'utf-16le') + return encodingOps.utf16le; + break; + case 6: + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + break; + case 3: + if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex') + return encodingOps.hex; + break; + case 9: + if (encoding === 'base64url' || + StringPrototypeToLowerCase(encoding) === 'base64url') + return encodingOps.base64url; + break; + } +} + +function byteLength(string, encoding) { + if (typeof string !== 'string') { + if (isArrayBufferView(string) || isAnyArrayBuffer(string)) { + return string.byteLength; + } + + throw new ERR_INVALID_ARG_TYPE( + 'string', ['string', 'Buffer', 'ArrayBuffer'], string, + ); + } + + const len = string.length; + if (len === 0) + return 0; + + if (!encoding || encoding === 'utf8') { + return byteLengthUtf8(string); + } + + if (encoding === 'ascii') { + return len; + } + + const ops = getEncodingOps(encoding); + if (ops === undefined) { + // TODO (ronag): Makes more sense to throw here. + // throw new ERR_UNKNOWN_ENCODING(encoding); + return byteLengthUtf8(string); + } + + return ops.byteLength(string); +} + +Buffer.byteLength = byteLength; + +// For backwards compatibility. +ObjectDefineProperty(Buffer.prototype, 'parent', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.buffer; + }, +}); +ObjectDefineProperty(Buffer.prototype, 'offset', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.byteOffset; + }, +}); + +Buffer.prototype.copy = + function copy(target, targetStart, sourceStart, sourceEnd) { + return copyImpl(this, target, targetStart, sourceStart, sourceEnd); + }; + +// No need to verify that "buf.length <= MAX_UINT32" since it's a read-only +// property of a typed array. +// This behaves neither like String nor Uint8Array in that we set start/end +// to their upper/lower bounds if the value passed is out of range. 
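+// For example, buf.toString('utf8', -1, 99) clamps the range to the whole
+// buffer, while buf.toString('utf8', 3, 1) returns '' because end <= start.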
+Buffer.prototype.toString = function toString(encoding, start, end) { + if (arguments.length === 0) { + return this.utf8Slice(0, this.length); + } + + const len = this.length; + + if (start <= 0) + start = 0; + else if (start >= len) + return ''; + else + start = MathTrunc(start) || 0; + + if (end === undefined || end > len) + end = len; + else + end = MathTrunc(end) || 0; + + if (end <= start) + return ''; + + if (encoding === undefined) + return this.utf8Slice(start, end); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + + return ops.slice(this, start, end); +}; + +Buffer.prototype.equals = function equals(otherBuffer) { + if (!isUint8Array(otherBuffer)) { + throw new ERR_INVALID_ARG_TYPE( + 'otherBuffer', ['Buffer', 'Uint8Array'], otherBuffer); + } + + if (this === otherBuffer) + return true; + const len = TypedArrayPrototypeGetByteLength(this); + if (len !== TypedArrayPrototypeGetByteLength(otherBuffer)) + return false; + + return len === 0 || _compare(this, otherBuffer) === 0; +}; + +let INSPECT_MAX_BYTES = 50; +// Override how buffers are presented by util.inspect(). +Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) { + const max = INSPECT_MAX_BYTES; + const actualMax = MathMin(max, this.length); + const remaining = this.length - max; + let str = StringPrototypeTrim(RegExpPrototypeSymbolReplace( + /(.{2})/g, this.hexSlice(0, actualMax), '$1 ')); + if (remaining > 0) + str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; + // Inspect special properties as well, if possible. + if (ctx) { + let extras = false; + const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; + const obj = { __proto__: null }; + ArrayPrototypeForEach(getOwnNonIndexProperties(this, filter), + (key) => { + extras = true; + obj[key] = this[key]; + }); + if (extras) { + if (this.length !== 0) + str += ', '; + // '[Object: null prototype] {'.length === 26 + // This is guarded with a test. + str += StringPrototypeSlice(utilInspect(obj, { + ...ctx, + breakLength: Infinity, + compact: true, + }), 27, -2); + } + } + let constructorName = 'Buffer'; + try { + const { constructor } = this; + if (typeof constructor === 'function' && ObjectPrototypeHasOwnProperty(constructor, 'name')) { + constructorName = constructor.name; + } + } catch { /* Ignore error and use default name */ } + return `<${constructorName} ${str}>`; +}; +Buffer.prototype.inspect = Buffer.prototype[customInspectSymbol]; + +Buffer.prototype.compare = function compare(target, + targetStart, + targetEnd, + sourceStart, + sourceEnd) { + if (!isUint8Array(target)) { + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + } + if (arguments.length === 1) + return _compare(this, target); + + if (targetStart === undefined) + targetStart = 0; + else + validateOffset(targetStart, 'targetStart'); + + if (targetEnd === undefined) + targetEnd = target.length; + else + validateOffset(targetEnd, 'targetEnd', 0, target.length); + + if (sourceStart === undefined) + sourceStart = 0; + else + validateOffset(sourceStart, 'sourceStart'); + + if (sourceEnd === undefined) + sourceEnd = this.length; + else + validateOffset(sourceEnd, 'sourceEnd', 0, this.length); + + if (sourceStart >= sourceEnd) + return (targetStart >= targetEnd ? 
0 : -1); + if (targetStart >= targetEnd) + return 1; + + return compareOffset(this, target, targetStart, sourceStart, targetEnd, + sourceEnd); +}; + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. +// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant if val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) { + validateBuffer(buffer); + + if (typeof byteOffset === 'string') { + encoding = byteOffset; + byteOffset = undefined; + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff; + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000; + } + // Coerce to Number. Values like null and [] become 0. + byteOffset = +byteOffset; + // If the offset is undefined, "foo", {}, coerces to NaN, search whole buffer. + if (NumberIsNaN(byteOffset)) { + byteOffset = dir ? 0 : (buffer.length || buffer.byteLength); + } + dir = !!dir; // Cast to bool. + + if (typeof val === 'number') + return indexOfNumber(buffer, val >>> 0, byteOffset, dir); + + let ops; + if (encoding === undefined) + ops = encodingOps.utf8; + else + ops = getEncodingOps(encoding); + + if (typeof val === 'string') { + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.indexOf(buffer, val, byteOffset, dir); + } + + if (isUint8Array(val)) { + const encodingVal = + (ops === undefined ? encodingsMap.utf8 : ops.encodingVal); + return indexOfBuffer(buffer, val, byteOffset, encodingVal, dir); + } + + throw new ERR_INVALID_ARG_TYPE( + 'value', ['number', 'string', 'Buffer', 'Uint8Array'], val, + ); +} + +Buffer.prototype.indexOf = function indexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true); +}; + +Buffer.prototype.lastIndexOf = function lastIndexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false); +}; + +Buffer.prototype.includes = function includes(val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1; +}; + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill(value, offset, end, encoding) { + return _fill(this, value, offset, end, encoding); +}; + +function _fill(buf, value, offset, end, encoding) { + if (typeof value === 'string') { + if (offset === undefined || typeof offset === 'string') { + encoding = offset; + offset = 0; + end = buf.length; + } else if (typeof end === 'string') { + encoding = end; + end = buf.length; + } + + const normalizedEncoding = normalizeEncoding(encoding); + if (normalizedEncoding === undefined) { + validateString(encoding, 'encoding'); + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + if (value.length === 0) { + // If value === '' default to zero. + value = 0; + } else if (value.length === 1) { + // Fast path: If `value` fits into a single byte, use that numeric value. 
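+      // e.g. buf.fill('a') reduces to buf.fill(0x61) and uses the numeric
+      // TypedArrayPrototypeFill() branch below, while a multi-byte character
+      // such as '\u0222' stays a string and is handled by bindingFill().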
+ if (normalizedEncoding === 'utf8') { + const code = StringPrototypeCharCodeAt(value, 0); + if (code < 128) { + value = code; + } + } else if (normalizedEncoding === 'latin1') { + value = StringPrototypeCharCodeAt(value, 0); + } + } + } else { + encoding = undefined; + } + + if (offset === undefined) { + offset = 0; + end = buf.length; + } else { + validateOffset(offset, 'offset'); + // Invalid ranges are not set to a default, so can range check early. + if (end === undefined) { + end = buf.length; + } else { + validateOffset(end, 'end', 0, buf.length); + } + if (offset >= end) + return buf; + } + + + if (typeof value === 'number') { + // OOB check + const byteLen = TypedArrayPrototypeGetByteLength(buf); + const fillLength = end - offset; + if (offset > end || fillLength + offset > byteLen) + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + + TypedArrayPrototypeFill(buf, value, offset, end); + } else { + const res = bindingFill(buf, value, offset, end, encoding); + if (res < 0) { + if (res === -1) + throw new ERR_INVALID_ARG_VALUE('value', value); + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + } + } + + return buf; +} + +Buffer.prototype.write = function write(string, offset, length, encoding) { + // Buffer#write(string); + if (offset === undefined) { + return this.utf8Write(string, 0, this.length); + } + // Buffer#write(string, encoding) + if (length === undefined && typeof offset === 'string') { + encoding = offset; + length = this.length; + offset = 0; + + // Buffer#write(string, offset[, length][, encoding]) + } else { + validateOffset(offset, 'offset', 0, this.length); + + const remaining = this.length - offset; + + if (length === undefined) { + length = remaining; + } else if (typeof length === 'string') { + encoding = length; + length = remaining; + } else { + validateOffset(length, 'length', 0, this.length); + if (length > remaining) + length = remaining; + } + } + + if (!encoding || encoding === 'utf8') + return this.utf8Write(string, offset, length); + if (encoding === 'ascii') + return this.asciiWrite(string, offset, length); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.write(this, string, offset, length); +}; + +Buffer.prototype.toJSON = function toJSON() { + if (this.length > 0) { + const data = new Array(this.length); + for (let i = 0; i < this.length; ++i) + data[i] = this[i]; + return { type: 'Buffer', data }; + } + return { type: 'Buffer', data: [] }; +}; + +function adjustOffset(offset, length) { + // Use Math.trunc() to convert offset to an integer value that can be larger + // than an Int32. Hence, don't use offset | 0 or similar techniques. + offset = MathTrunc(offset); + if (offset === 0) { + return 0; + } + if (offset < 0) { + offset += length; + return offset > 0 ? offset : 0; + } + if (offset < length) { + return offset; + } + return NumberIsNaN(offset) ? 0 : length; +} + +Buffer.prototype.subarray = function subarray(start, end) { + const srcLength = this.length; + start = adjustOffset(start, srcLength); + end = end !== undefined ? adjustOffset(end, srcLength) : srcLength; + const newLength = end > start ? 
end - start : 0; + return new FastBuffer(this.buffer, this.byteOffset + start, newLength); +}; + +Buffer.prototype.slice = function slice(start, end) { + return this.subarray(start, end); +}; + +function swap(b, n, m) { + const i = b[n]; + b[n] = b[m]; + b[m] = i; +} + +Buffer.prototype.swap16 = function swap16() { + // For Buffer.length < 128, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 2 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('16-bits'); + if (len < 128) { + for (let i = 0; i < len; i += 2) + swap(this, i, i + 1); + return this; + } + return _swap16(this); +}; + +Buffer.prototype.swap32 = function swap32() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 4 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('32-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 4) { + swap(this, i, i + 3); + swap(this, i + 1, i + 2); + } + return this; + } + return _swap32(this); +}; + +Buffer.prototype.swap64 = function swap64() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 8 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('64-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 8) { + swap(this, i, i + 7); + swap(this, i + 1, i + 6); + swap(this, i + 2, i + 5); + swap(this, i + 3, i + 4); + } + return this; + } + return _swap64(this); +}; + +Buffer.prototype.toLocaleString = Buffer.prototype.toString; + +let transcode; +if (internalBinding('config').hasIntl) { + const { + icuErrName, + transcode: _transcode, + } = internalBinding('icu'); + + // Transcodes the Buffer from one encoding to another, returning a new + // Buffer instance. + transcode = function transcode(source, fromEncoding, toEncoding) { + if (!isUint8Array(source)) { + throw new ERR_INVALID_ARG_TYPE('source', + ['Buffer', 'Uint8Array'], source); + } + if (source.length === 0) return Buffer.alloc(0); + + fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding; + toEncoding = normalizeEncoding(toEncoding) || toEncoding; + const result = _transcode(source, fromEncoding, toEncoding); + if (typeof result !== 'number') + return result; + + const code = icuErrName(result); + const err = genericNodeError( + `Unable to transcode Buffer [${code}]`, + { code: code, errno: result }, + ); + throw err; + }; +} + +function btoa(input) { + // The implementation here has not been performance optimized in any way and + // should not be. + // Refs: https://github.com/nodejs/node/pull/38433#issuecomment-828426932 + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + const result = _btoa(`${input}`); + if (result === -1) { + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + } + return result; +} + +function atob(input) { + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + + const result = _atob(`${input}`); + + switch (result) { + case -2: // Invalid character + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + case -1: // Single character remained + throw lazyDOMException( + 'The string to be decoded is not correctly encoded.', + 'InvalidCharacterError'); + case -3: // Possible overflow + // TODO(@anonrig): Throw correct error in here. 
+ throw lazyDOMException('The input causes overflow.', 'InvalidCharacterError'); + default: + return result; + } +} + +function isUtf8(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsUtf8(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +function isAscii(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsAscii(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +module.exports = { + Buffer, + SlowBuffer: deprecate( + SlowBuffer, + 'SlowBuffer() is deprecated. Please use Buffer.allocUnsafeSlow()', + 'DEP0030'), + transcode, + isUtf8, + isAscii, + + // Legacy + kMaxLength, + kStringMaxLength, + btoa, + atob, +}; + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + INSPECT_MAX_BYTES: { + __proto__: null, + configurable: true, + enumerable: true, + get() { return INSPECT_MAX_BYTES; }, + set(val) { + validateNumber(val, 'INSPECT_MAX_BYTES', 0); + INSPECT_MAX_BYTES = val; + }, + }, +}); + +defineLazyProperties( + module.exports, + 'internal/blob', + ['Blob', 'resolveObjectURL'], +); +defineLazyProperties( + module.exports, + 'internal/file', + ['File'], +); \ No newline at end of file diff --git a/.codesandbox/node/dfn.js b/.codesandbox/node/dfn.js new file mode 100644 index 0000000000..29b453dc2f --- /dev/null +++ b/.codesandbox/node/dfn.js @@ -0,0 +1,118 @@ +var dfnMapTarget = -1; +var dfnMapDone = 0; +var dfnMap = {}; +document.addEventListener('DOMContentLoaded', function (event) { + var links = []; + dfnMapTarget = document.links.length; + for (var i = 0; i < dfnMapTarget; i += 1) + links[i] = document.links[i]; + var inc = 100; + for (var i = 0; i < dfnMapTarget; i += inc) { + setTimeout(function (j) { + for (var k = j; k < j+inc && k < dfnMapTarget; k += 1) { + if (links[k].href.indexOf('#') >= 0) { + if (links[k].className != "no-backref" && + links[k].parentNode.className != "no-backref") { + var s = links[k].href.substr(links[k].href.indexOf('#') + 1); + if (!(s in dfnMap)) + dfnMap[s] = []; + dfnMap[s].push(links[k]); + } + } + dfnMapDone += 1; + } + }, 0, i); + } + document.body.className += " dfnEnabled"; +}, false); + +var dfnPanel; +var dfnUniqueId = 0; +var dfnTimeout; +document.addEventListener('click', dfnShow, false); +function dfnShow(event) { + if (dfnTimeout) { + clearTimeout(dfnTimeout); + dfnTimeout = null; + } + if (dfnPanel) { + dfnPanel.parentNode.removeChild(dfnPanel); + dfnPanel = null; + } + if (dfnMapDone == dfnMapTarget) { + var node = event.target; + while (node && (node.nodeType != event.target.ELEMENT_NODE || node.tagName != "DFN")) + node = node.parentNode; + if (node) { + var panel = document.createElement('div'); + panel.className = 'dfnPanel'; + if (node.id) { + var permalinkP = document.createElement('p'); + var permalinkA = document.createElement('a'); + permalinkA.href = '#' + node.id; + permalinkA.textContent = '#' + node.id; + permalinkP.appendChild(permalinkA); + panel.appendChild(permalinkP); + } + var p = document.createElement('p'); + panel.appendChild(p); + if (node.id in dfnMap || node.parentNode.id in dfnMap) { + p.textContent = 'Referenced in:'; + var ul = document.createElement('ul'); + var lastHeader; + var lastLi; + var n; + var sourceLinks = []; + if (node.id in dfnMap) + for (var i = 0; i < dfnMap[node.id].length; i += 1) + sourceLinks.push(dfnMap[node.id][i]); + 
if (node.parentNode.id in dfnMap) + for (var i = 0; i < dfnMap[node.parentNode.id].length; i += 1) + sourceLinks.push(dfnMap[node.parentNode.id][i]); + for (var i = 0; i < sourceLinks.length; i += 1) { + var link = sourceLinks[i]; + var header = dfnGetCaption(link); + var a = document.createElement('a'); + if (!link.id) + link.id = 'dfnReturnLink-' + dfnUniqueId++; + a.href = '#' + link.id; + if (header != lastHeader) { + lastHeader = header; + n = 1; + var li = document.createElement('li'); + var cloneHeader = header.cloneNode(true); + while (cloneHeader.hasChildNodes()) + if (cloneHeader.firstChild.className == 'section-link') + cloneHeader.removeChild(cloneHeader.firstChild); + else + a.appendChild(cloneHeader.firstChild); + lastLi = li; + li.appendChild(a); + ul.appendChild(li); + } else { + n += 1; + a.appendChild(document.createTextNode('(' + n + ')')); + lastLi.appendChild(document.createTextNode(' ')); + lastLi.appendChild(a); + } + } + panel.appendChild(ul); + } else { + p.textContent = 'No references in this file.'; + } + node.appendChild(panel); + dfnPanel = panel; + } + } else { + dfnTimeout = setTimeout(dfnShow, 250, event); + } +} + +function dfnGetCaption(link) { + var node = link; + while (node && !(node.parentNode.tagName == "DIV" && node.parentNode.className == "section")) + node = node.parentNode; + while (node && (node.nodeType != node.ELEMENT_NODE || !node.tagName.match(/^H[1-6]$/))) + node = node.previousSibling; + return node; +} \ No newline at end of file diff --git a/.codesandbox/node/dgram.js b/.codesandbox/node/dgram.js new file mode 100644 index 0000000000..c77ea89cc8 --- /dev/null +++ b/.codesandbox/node/dgram.js @@ -0,0 +1,1111 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + ArrayPrototypePush, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperty, + ObjectSetPrototypeOf, + ReflectApply, + SymbolAsyncDispose, + SymbolDispose, +} = primordials; + +const { + ErrnoException, + ExceptionWithHostPort, + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_FD_TYPE, + ERR_IP_BLOCKED, + ERR_MISSING_ARGS, + ERR_SOCKET_ALREADY_BOUND, + ERR_SOCKET_BAD_BUFFER_SIZE, + ERR_SOCKET_BUFFER_SIZE, + ERR_SOCKET_DGRAM_IS_CONNECTED, + ERR_SOCKET_DGRAM_NOT_CONNECTED, + ERR_SOCKET_DGRAM_NOT_RUNNING, + }, +} = require('internal/errors'); +const { + kStateSymbol, + _createSocketHandle, + newHandle, +} = require('internal/dgram'); +const { isIP } = require('internal/net'); +const { + isInt32, + validateAbortSignal, + validateString, + validateNumber, + validatePort, + validateUint32, +} = require('internal/validators'); +const { Buffer } = require('buffer'); +const { deprecate, guessHandleType, promisify } = require('internal/util'); +const { isArrayBufferView } = require('internal/util/types'); +const EventEmitter = require('events'); +const { addAbortListener } = require('internal/events/abort_listener'); +const { + defaultTriggerAsyncIdScope, + symbols: { async_id_symbol, owner_symbol }, +} = require('internal/async_hooks'); +const { UV_UDP_REUSEADDR } = internalBinding('constants').os; + +const { + constants: { UV_UDP_IPV6ONLY, UV_UDP_REUSEPORT }, + UDP, + SendWrap, +} = internalBinding('udp_wrap'); + +const dc = require('diagnostics_channel'); +const udpSocketChannel = dc.channel('udp.socket'); + +const BIND_STATE_UNBOUND = 0; +const BIND_STATE_BINDING = 1; +const BIND_STATE_BOUND = 2; + +const CONNECT_STATE_DISCONNECTED = 0; +const CONNECT_STATE_CONNECTING = 1; +const CONNECT_STATE_CONNECTED = 2; + +const RECV_BUFFER = true; +const 
SEND_BUFFER = false; + +// Lazily loaded +let _cluster = null; +function lazyLoadCluster() { + return _cluster ??= require('cluster'); +} +let _blockList = null; +function lazyLoadBlockList() { + return _blockList ??= require('internal/blocklist').BlockList; +} + +function Socket(type, listener) { + FunctionPrototypeCall(EventEmitter, this); + let lookup; + let recvBufferSize; + let sendBufferSize; + let receiveBlockList; + let sendBlockList; + + let options; + if (type !== null && typeof type === 'object') { + options = type; + type = options.type; + lookup = options.lookup; + if (options.recvBufferSize) { + validateUint32(options.recvBufferSize, 'options.recvBufferSize'); + } + if (options.sendBufferSize) { + validateUint32(options.sendBufferSize, 'options.sendBufferSize'); + } + recvBufferSize = options.recvBufferSize; + sendBufferSize = options.sendBufferSize; + if (options.receiveBlockList) { + if (!lazyLoadBlockList().isBlockList(options.receiveBlockList)) { + throw new ERR_INVALID_ARG_TYPE('options.receiveBlockList', 'net.BlockList', options.receiveBlockList); + } + receiveBlockList = options.receiveBlockList; + } + if (options.sendBlockList) { + if (!lazyLoadBlockList().isBlockList(options.sendBlockList)) { + throw new ERR_INVALID_ARG_TYPE('options.sendBlockList', 'net.BlockList', options.sendBlockList); + } + sendBlockList = options.sendBlockList; + } + } + + const handle = newHandle(type, lookup); + handle[owner_symbol] = this; + + this[async_id_symbol] = handle.getAsyncId(); + this.type = type; + + if (typeof listener === 'function') + this.on('message', listener); + + this[kStateSymbol] = { + handle, + receiving: false, + bindState: BIND_STATE_UNBOUND, + connectState: CONNECT_STATE_DISCONNECTED, + queue: undefined, + reuseAddr: options?.reuseAddr, // Use UV_UDP_REUSEADDR if true. + reusePort: options?.reusePort, + ipv6Only: options?.ipv6Only, + recvBufferSize, + sendBufferSize, + receiveBlockList, + sendBlockList, + }; + + if (options?.signal !== undefined) { + const { signal } = options; + validateAbortSignal(signal, 'options.signal'); + const onAborted = () => { + if (this[kStateSymbol].handle) this.close(); + }; + if (signal.aborted) { + onAborted(); + } else { + const disposable = addAbortListener(signal, onAborted); + this.once('close', disposable[SymbolDispose]); + } + } + if (udpSocketChannel.hasSubscribers) { + udpSocketChannel.publish({ + socket: this, + }); + } +} +ObjectSetPrototypeOf(Socket.prototype, EventEmitter.prototype); +ObjectSetPrototypeOf(Socket, EventEmitter); + + +function createSocket(type, listener) { + return new Socket(type, listener); +} + + +function startListening(socket) { + const state = socket[kStateSymbol]; + + state.handle.onmessage = onMessage; + state.handle.onerror = onError; + state.handle.recvStart(); + state.receiving = true; + state.bindState = BIND_STATE_BOUND; + + if (state.recvBufferSize) + bufferSize(socket, state.recvBufferSize, RECV_BUFFER); + + if (state.sendBufferSize) + bufferSize(socket, state.sendBufferSize, SEND_BUFFER); + + socket.emit('listening'); +} + +function replaceHandle(self, newHandle) { + const state = self[kStateSymbol]; + const oldHandle = state.handle; + // Sync the old handle state to new handle + if (!oldHandle.hasRef() && typeof newHandle.unref === 'function') { + newHandle.unref(); + } + // Set up the handle that we got from primary. 
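+  // replaceHandle() is reached either from bindServerHandle(), with a handle
+  // obtained from the cluster primary, or from bind() when a handle-like
+  // object is passed in; in both cases the lookup/bind/send functions of the
+  // handle being replaced are carried over.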
+ newHandle.lookup = oldHandle.lookup; + newHandle.bind = oldHandle.bind; + newHandle.send = oldHandle.send; + newHandle[owner_symbol] = self; + + // Replace the existing handle by the handle we got from primary. + oldHandle.close(); + state.handle = newHandle; +} + +function bufferSize(self, size, buffer) { + if (size >>> 0 !== size) + throw new ERR_SOCKET_BAD_BUFFER_SIZE(); + + const ctx = {}; + const ret = self[kStateSymbol].handle.bufferSize(size, buffer, ctx); + if (ret === undefined) { + throw new ERR_SOCKET_BUFFER_SIZE(ctx); + } + return ret; +} + +// Query primary process to get the server handle and utilize it. +function bindServerHandle(self, options, errCb) { + const cluster = lazyLoadCluster(); + + const state = self[kStateSymbol]; + cluster._getServer(self, options, (err, handle) => { + if (err) { + // Do not call callback if socket is closed + if (state.handle) { + errCb(err); + } + return; + } + + if (!state.handle) { + // Handle has been closed in the mean time. + return handle.close(); + } + + replaceHandle(self, handle); + startListening(self); + }); +} + +Socket.prototype.bind = function(port_, address_ /* , callback */) { + let port = port_; + + healthCheck(this); + const state = this[kStateSymbol]; + + if (state.bindState !== BIND_STATE_UNBOUND) + throw new ERR_SOCKET_ALREADY_BOUND(); + + state.bindState = BIND_STATE_BINDING; + + const cb = arguments.length && arguments[arguments.length - 1]; + if (typeof cb === 'function') { + function removeListeners() { + this.removeListener('error', removeListeners); + this.removeListener('listening', onListening); + } + + function onListening() { + FunctionPrototypeCall(removeListeners, this); + FunctionPrototypeCall(cb, this); + } + + this.on('error', removeListeners); + this.on('listening', onListening); + } + + if (port !== null && + typeof port === 'object' && + typeof port.recvStart === 'function') { + replaceHandle(this, port); + startListening(this); + return this; + } + + // Open an existing fd instead of creating a new one. + if (port !== null && typeof port === 'object' && + isInt32(port.fd) && port.fd > 0) { + const fd = port.fd; + const exclusive = !!port.exclusive; + const state = this[kStateSymbol]; + + const cluster = lazyLoadCluster(); + + if (cluster.isWorker && !exclusive) { + bindServerHandle(this, { + address: null, + port: null, + addressType: this.type, + fd, + flags: null, + }, (err) => { + // Callback to handle error. + const ex = new ErrnoException(err, 'open'); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + }); + return this; + } + + const type = guessHandleType(fd); + if (type !== 'UDP') + throw new ERR_INVALID_FD_TYPE(type); + const err = state.handle.open(fd); + + if (err) + throw new ErrnoException(err, 'open'); + + startListening(this); + return this; + } + + let address; + let exclusive; + + if (port !== null && typeof port === 'object') { + address = port.address || ''; + exclusive = !!port.exclusive; + port = port.port; + } else { + address = typeof address_ === 'function' ? 
'' : address_; + exclusive = false; + } + + // Defaulting address for bind to all interfaces + if (!address) { + if (this.type === 'udp4') + address = '0.0.0.0'; + else + address = '::'; + } + + // Resolve address first + state.handle.lookup(address, (err, ip) => { + if (!state.handle) + return; // Handle has been closed in the mean time + + if (err) { + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', err); + return; + } + + const cluster = lazyLoadCluster(); + + let flags = 0; + if (state.reuseAddr) + flags |= UV_UDP_REUSEADDR; + if (state.ipv6Only) + flags |= UV_UDP_IPV6ONLY; + if (state.reusePort) { + exclusive = true; + flags |= UV_UDP_REUSEPORT; + } + + if (cluster.isWorker && !exclusive) { + bindServerHandle(this, { + address: ip, + port: port, + addressType: this.type, + fd: -1, + flags: flags, + }, (err) => { + // Callback to handle error. + const ex = new ExceptionWithHostPort(err, 'bind', ip, port); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + }); + } else { + const err = state.handle.bind(ip, port || 0, flags); + if (err) { + const ex = new ExceptionWithHostPort(err, 'bind', ip, port); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + // Todo: close? + return; + } + + startListening(this); + } + }); + + return this; +}; + +Socket.prototype.connect = function(port, address, callback) { + port = validatePort(port, 'Port', false); + if (typeof address === 'function') { + callback = address; + address = ''; + } else if (address === undefined) { + address = ''; + } + + validateString(address, 'address'); + + const state = this[kStateSymbol]; + + if (state.connectState !== CONNECT_STATE_DISCONNECTED) + throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + + state.connectState = CONNECT_STATE_CONNECTING; + if (state.bindState === BIND_STATE_UNBOUND) + this.bind({ port: 0, exclusive: true }, null); + + if (state.bindState !== BIND_STATE_BOUND) { + enqueue(this, FunctionPrototypeBind(_connect, this, + port, address, callback)); + return; + } + + ReflectApply(_connect, this, [port, address, callback]); +}; + + +function _connect(port, address, callback) { + const state = this[kStateSymbol]; + if (callback) + this.once('connect', callback); + + const afterDns = (ex, ip) => { + defaultTriggerAsyncIdScope( + this[async_id_symbol], + doConnect, + ex, this, ip, address, port, callback, + ); + }; + + state.handle.lookup(address, afterDns); +} + + +function doConnect(ex, self, ip, address, port, callback) { + const state = self[kStateSymbol]; + if (!state.handle) + return; + if (!ex && state.sendBlockList?.check(ip, `ipv${isIP(ip)}`)) { + ex = new ERR_IP_BLOCKED(ip); + } + if (!ex) { + const err = state.handle.connect(ip, port); + if (err) { + ex = new ExceptionWithHostPort(err, 'connect', address, port); + } + } + + if (ex) { + state.connectState = CONNECT_STATE_DISCONNECTED; + return process.nextTick(() => { + if (callback) { + self.removeListener('connect', callback); + callback(ex); + } else { + self.emit('error', ex); + } + }); + } + + state.connectState = CONNECT_STATE_CONNECTED; + process.nextTick(() => self.emit('connect')); +} + + +Socket.prototype.disconnect = function() { + const state = this[kStateSymbol]; + if (state.connectState !== CONNECT_STATE_CONNECTED) + throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + + const err = state.handle.disconnect(); + if (err) + throw new ErrnoException(err, 'connect'); + else + state.connectState = CONNECT_STATE_DISCONNECTED; +}; + + +// Thin wrapper around `send`, here for compatibility with dgram_legacy.js 
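+// The offset, length, port and address arguments are validated and the call
+// is forwarded unchanged, e.g.
+//   socket.sendto(buf, 0, buf.length, 41234, '127.0.0.1')
+// behaves like
+//   socket.send(buf, 0, buf.length, 41234, '127.0.0.1')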
+Socket.prototype.sendto = function(buffer, + offset, + length, + port, + address, + callback) { + validateNumber(offset, 'offset'); + validateNumber(length, 'length'); + validateNumber(port, 'port'); + validateString(address, 'address'); + + this.send(buffer, offset, length, port, address, callback); +}; + + +function sliceBuffer(buffer, offset, length) { + if (typeof buffer === 'string') { + buffer = Buffer.from(buffer); + } else if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } + + offset = offset >>> 0; + length = length >>> 0; + if (offset > buffer.byteLength) { + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + } + + if (offset + length > buffer.byteLength) { + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } + + return Buffer.from(buffer.buffer, buffer.byteOffset + offset, length); +} + + +function fixBufferList(list) { + const newlist = new Array(list.length); + + for (let i = 0, l = list.length; i < l; i++) { + const buf = list[i]; + if (typeof buf === 'string') + newlist[i] = Buffer.from(buf); + else if (Buffer.isBuffer(buf)) + newlist[i] = buf; + else if (!isArrayBufferView(buf)) + return null; + else + newlist[i] = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + } + + return newlist; +} + + +function enqueue(self, toEnqueue) { + const state = self[kStateSymbol]; + + // If the send queue hasn't been initialized yet, do it, and install an + // event handler that flushes the send queue after binding is done. + if (state.queue === undefined) { + state.queue = []; + self.once(EventEmitter.errorMonitor, onListenError); + self.once('listening', onListenSuccess); + } + ArrayPrototypePush(state.queue, toEnqueue); +} + + +function onListenSuccess() { + this.removeListener(EventEmitter.errorMonitor, onListenError); + FunctionPrototypeCall(clearQueue, this); +} + + +function onListenError(err) { + this.removeListener('listening', onListenSuccess); + this[kStateSymbol].queue = undefined; +} + + +function clearQueue() { + const state = this[kStateSymbol]; + const queue = state.queue; + state.queue = undefined; + + // Flush the send queue. 
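+  // Each entry is a send() or _connect() call whose arguments enqueue()
+  // pre-bound while the socket was still binding; replay them in order now
+  // that binding has completed.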
+ for (const queueEntry of queue) + queueEntry(); +} + +// valid combinations +// For connectionless sockets +// send(buffer, offset, length, port, address, callback) +// send(buffer, offset, length, port, address) +// send(buffer, offset, length, port, callback) +// send(buffer, offset, length, port) +// send(bufferOrList, port, address, callback) +// send(bufferOrList, port, address) +// send(bufferOrList, port, callback) +// send(bufferOrList, port) +// For connected sockets +// send(buffer, offset, length, callback) +// send(buffer, offset, length) +// send(bufferOrList, callback) +// send(bufferOrList) +Socket.prototype.send = function(buffer, + offset, + length, + port, + address, + callback) { + + let list; + const state = this[kStateSymbol]; + const connected = state.connectState === CONNECT_STATE_CONNECTED; + if (!connected) { + if (address || (port && typeof port !== 'function')) { + buffer = sliceBuffer(buffer, offset, length); + } else { + callback = port; + port = offset; + address = length; + } + } else { + if (typeof length === 'number') { + buffer = sliceBuffer(buffer, offset, length); + if (typeof port === 'function') { + callback = port; + port = null; + } + } else { + callback = offset; + } + + if (port || address) + throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + } + + if (!ArrayIsArray(buffer)) { + if (typeof buffer === 'string') { + list = [ Buffer.from(buffer) ]; + } else if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } else { + list = [ buffer ]; + } + } else if (!(list = fixBufferList(buffer))) { + throw new ERR_INVALID_ARG_TYPE('buffer list arguments', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } + + if (!connected) + port = validatePort(port, 'Port', false); + + // Normalize callback so it's either a function or undefined but not anything + // else. + if (typeof callback !== 'function') + callback = undefined; + + if (typeof address === 'function') { + callback = address; + address = undefined; + } else if (address != null) { + validateString(address, 'address'); + } + + healthCheck(this); + + if (state.bindState === BIND_STATE_UNBOUND) + this.bind({ port: 0, exclusive: true }, null); + + if (list.length === 0) + ArrayPrototypePush(list, Buffer.alloc(0)); + + // If the socket hasn't been bound yet, push the outbound packet onto the + // send queue and send after binding is complete. + if (state.bindState !== BIND_STATE_BOUND) { + enqueue(this, FunctionPrototypeBind(this.send, this, + list, port, address, callback)); + return; + } + + const afterDns = (ex, ip) => { + defaultTriggerAsyncIdScope( + this[async_id_symbol], + doSend, + ex, this, ip, list, address, port, callback, + ); + }; + + if (!connected) { + state.handle.lookup(address, afterDns); + } else { + afterDns(null, null); + } +}; + +function doSend(ex, self, ip, list, address, port, callback) { + const state = self[kStateSymbol]; + + if (ex) { + if (typeof callback === 'function') { + process.nextTick(callback, ex); + return; + } + + process.nextTick(() => self.emit('error', ex)); + return; + } else if (!state.handle) { + return; + } + + if (ip && state.sendBlockList?.check(ip, `ipv${isIP(ip)}`)) { + if (callback) { + process.nextTick(callback, new ERR_IP_BLOCKED(ip)); + } + return; + } + + const req = new SendWrap(); + req.list = list; // Keep reference alive. 
+ req.address = address; + req.port = port; + if (callback) { + req.callback = callback; + req.oncomplete = afterSend; + } + + let err; + if (port) + err = state.handle.send(req, list, list.length, port, ip, !!callback); + else + err = state.handle.send(req, list, list.length, !!callback); + + if (err >= 1) { + // Synchronous finish. The return code is msg_length + 1 so that we can + // distinguish between synchronous success and asynchronous success. + if (callback) + process.nextTick(callback, null, err - 1); + return; + } + + if (err && callback) { + // Don't emit as error, dgram_legacy.js compatibility + const ex = new ExceptionWithHostPort(err, 'send', address, port); + process.nextTick(callback, ex); + } +} + +function afterSend(err, sent) { + if (err) { + err = new ExceptionWithHostPort(err, 'send', this.address, this.port); + } else { + err = null; + } + + this.callback(err, sent); +} + +Socket.prototype.close = function(callback) { + const state = this[kStateSymbol]; + const queue = state.queue; + + if (typeof callback === 'function') + this.on('close', callback); + + if (queue !== undefined) { + ArrayPrototypePush(queue, FunctionPrototypeBind(this.close, this)); + return this; + } + + healthCheck(this); + stopReceiving(this); + state.handle.close(); + state.handle = null; + defaultTriggerAsyncIdScope(this[async_id_symbol], + process.nextTick, + socketCloseNT, + this); + + return this; +}; + +Socket.prototype[SymbolAsyncDispose] = async function() { + if (!this[kStateSymbol].handle) { + return; + } + await FunctionPrototypeCall(promisify(this.close), this); +}; + + +function socketCloseNT(self) { + self.emit('close'); +} + + +Socket.prototype.address = function() { + healthCheck(this); + + const out = {}; + const err = this[kStateSymbol].handle.getsockname(out); + if (err) { + throw new ErrnoException(err, 'getsockname'); + } + + return out; +}; + +Socket.prototype.remoteAddress = function() { + healthCheck(this); + + const state = this[kStateSymbol]; + if (state.connectState !== CONNECT_STATE_CONNECTED) + throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + + const out = {}; + const err = state.handle.getpeername(out); + if (err) + throw new ErrnoException(err, 'getpeername'); + + return out; +}; + + +Socket.prototype.setBroadcast = function(arg) { + const err = this[kStateSymbol].handle.setBroadcast(arg ? 1 : 0); + if (err) { + throw new ErrnoException(err, 'setBroadcast'); + } +}; + + +Socket.prototype.setTTL = function(ttl) { + validateNumber(ttl, 'ttl'); + + const err = this[kStateSymbol].handle.setTTL(ttl); + if (err) { + throw new ErrnoException(err, 'setTTL'); + } + + return ttl; +}; + + +Socket.prototype.setMulticastTTL = function(ttl) { + validateNumber(ttl, 'ttl'); + + const err = this[kStateSymbol].handle.setMulticastTTL(ttl); + if (err) { + throw new ErrnoException(err, 'setMulticastTTL'); + } + + return ttl; +}; + + +Socket.prototype.setMulticastLoopback = function(arg) { + const err = this[kStateSymbol].handle.setMulticastLoopback(arg ? 
1 : 0); + if (err) { + throw new ErrnoException(err, 'setMulticastLoopback'); + } + + return arg; // 0.4 compatibility +}; + + +Socket.prototype.setMulticastInterface = function(interfaceAddress) { + healthCheck(this); + validateString(interfaceAddress, 'interfaceAddress'); + + const err = this[kStateSymbol].handle.setMulticastInterface(interfaceAddress); + if (err) { + throw new ErrnoException(err, 'setMulticastInterface'); + } +}; + +Socket.prototype.addMembership = function(multicastAddress, + interfaceAddress) { + healthCheck(this); + + if (!multicastAddress) { + throw new ERR_MISSING_ARGS('multicastAddress'); + } + + const { handle } = this[kStateSymbol]; + const err = handle.addMembership(multicastAddress, interfaceAddress); + if (err) { + throw new ErrnoException(err, 'addMembership'); + } +}; + + +Socket.prototype.dropMembership = function(multicastAddress, + interfaceAddress) { + healthCheck(this); + + if (!multicastAddress) { + throw new ERR_MISSING_ARGS('multicastAddress'); + } + + const { handle } = this[kStateSymbol]; + const err = handle.dropMembership(multicastAddress, interfaceAddress); + if (err) { + throw new ErrnoException(err, 'dropMembership'); + } +}; + +Socket.prototype.addSourceSpecificMembership = function(sourceAddress, + groupAddress, + interfaceAddress) { + healthCheck(this); + + validateString(sourceAddress, 'sourceAddress'); + validateString(groupAddress, 'groupAddress'); + + const err = + this[kStateSymbol].handle.addSourceSpecificMembership(sourceAddress, + groupAddress, + interfaceAddress); + if (err) { + throw new ErrnoException(err, 'addSourceSpecificMembership'); + } +}; + + +Socket.prototype.dropSourceSpecificMembership = function(sourceAddress, + groupAddress, + interfaceAddress) { + healthCheck(this); + + validateString(sourceAddress, 'sourceAddress'); + validateString(groupAddress, 'groupAddress'); + + const err = + this[kStateSymbol].handle.dropSourceSpecificMembership(sourceAddress, + groupAddress, + interfaceAddress); + if (err) { + throw new ErrnoException(err, 'dropSourceSpecificMembership'); + } +}; + + +function healthCheck(socket) { + if (!socket[kStateSymbol].handle) { + // Error message from dgram_legacy.js. 
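+    // close() clears state.handle, so bind(), send(), address() and similar
+    // calls that run this check afterwards report the socket as not running.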
+ throw new ERR_SOCKET_DGRAM_NOT_RUNNING(); + } +} + + +function stopReceiving(socket) { + const state = socket[kStateSymbol]; + + if (!state.receiving) + return; + + state.handle.recvStop(); + state.receiving = false; +} + + +function onMessage(nread, handle, buf, rinfo) { + const self = handle[owner_symbol]; + if (nread < 0) { + return self.emit('error', new ErrnoException(nread, 'recvmsg')); + } + if (self[kStateSymbol]?.receiveBlockList?.check(rinfo.address, + rinfo.family?.toLocaleLowerCase())) { + return; + } + rinfo.size = buf.length; // compatibility + self.emit('message', buf, rinfo); +} + + +function onError(nread, handle, error) { + const self = handle[owner_symbol]; + return self.emit('error', error); +} + + +Socket.prototype.ref = function() { + const handle = this[kStateSymbol].handle; + + if (handle) + handle.ref(); + + return this; +}; + + +Socket.prototype.unref = function() { + const handle = this[kStateSymbol].handle; + + if (handle) + handle.unref(); + + return this; +}; + + +Socket.prototype.setRecvBufferSize = function(size) { + bufferSize(this, size, RECV_BUFFER); +}; + + +Socket.prototype.setSendBufferSize = function(size) { + bufferSize(this, size, SEND_BUFFER); +}; + + +Socket.prototype.getRecvBufferSize = function() { + return bufferSize(this, 0, RECV_BUFFER); +}; + + +Socket.prototype.getSendBufferSize = function() { + return bufferSize(this, 0, SEND_BUFFER); +}; + +Socket.prototype.getSendQueueSize = function() { + return this[kStateSymbol].handle.getSendQueueSize(); +}; + +Socket.prototype.getSendQueueCount = function() { + return this[kStateSymbol].handle.getSendQueueCount(); +}; + +// Deprecated private APIs. +ObjectDefineProperty(Socket.prototype, '_handle', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].handle; + }, 'Socket.prototype._handle is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].handle = val; + }, 'Socket.prototype._handle is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_receiving', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].receiving; + }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].receiving = val; + }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_bindState', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].bindState; + }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].bindState = val; + }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_queue', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].queue; + }, 'Socket.prototype._queue is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].queue = val; + }, 'Socket.prototype._queue is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_reuseAddr', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].reuseAddr; + }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].reuseAddr = val; + }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), +}); + + +Socket.prototype._healthCheck = deprecate(function() { + healthCheck(this); +}, 'Socket.prototype._healthCheck() is deprecated', 'DEP0112'); + + +Socket.prototype._stopReceiving = 
deprecate(function() { + stopReceiving(this); +}, 'Socket.prototype._stopReceiving() is deprecated', 'DEP0112'); + + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(UDP.prototype, 'owner', { + __proto__: null, + get() { return this[owner_symbol]; }, + set(v) { return this[owner_symbol] = v; }, +}); + + +module.exports = { + _createSocketHandle: deprecate( + _createSocketHandle, + 'dgram._createSocketHandle() is deprecated', + 'DEP0112', + ), + createSocket, + Socket, +}; \ No newline at end of file diff --git a/.codesandbox/node/diagnostics_channel.js b/.codesandbox/node/diagnostics_channel.js new file mode 100644 index 0000000000..1422b0dbdf --- /dev/null +++ b/.codesandbox/node/diagnostics_channel.js @@ -0,0 +1,439 @@ +"use strict"; + +const { + ArrayPrototypeAt, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ObjectDefineProperty, + ObjectGetPrototypeOf, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeThen, + PromiseReject, + PromiseResolve, + ReflectApply, + SafeFinalizationRegistry, + SafeMap, + SymbolHasInstance, +} = primordials; + +const { + codes: { ERR_INVALID_ARG_TYPE }, +} = require("internal/errors"); +const { validateFunction } = require("internal/validators"); + +const { triggerUncaughtException } = internalBinding("errors"); + +const { WeakReference } = require("internal/util"); + +// Can't delete when weakref count reaches 0 as it could increment again. +// Only GC can be used as a valid time to clean up the channels map. +class WeakRefMap extends SafeMap { + #finalizers = new SafeFinalizationRegistry((key) => { + // Check that the key doesn't have any value before deleting, as the WeakRef for the key + // may have been replaced since finalization callbacks aren't synchronous with GC. + if (!this.has(key)) this.delete(key); + }); + + set(key, value) { + this.#finalizers.register(value, key); + return super.set(key, new WeakReference(value)); + } + + get(key) { + return super.get(key)?.get(); + } + + has(key) { + return !!this.get(key); + } + + incRef(key) { + return super.get(key)?.incRef(); + } + + decRef(key) { + return super.get(key)?.decRef(); + } +} + +function markActive(channel) { + // eslint-disable-next-line no-use-before-define + ObjectSetPrototypeOf(channel, ActiveChannel.prototype); + channel._subscribers = []; + channel._stores = new SafeMap(); +} + +function maybeMarkInactive(channel) { + // When there are no more active subscribers or bound, restore to fast prototype. + if (!channel._subscribers.length && !channel._stores.size) { + // eslint-disable-next-line no-use-before-define + ObjectSetPrototypeOf(channel, Channel.prototype); + channel._subscribers = undefined; + channel._stores = undefined; + } +} + +function defaultTransform(data) { + return data; +} + +function wrapStoreRun(store, data, next, transform = defaultTransform) { + return () => { + let context; + try { + context = transform(data); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + return next(); + } + + return store.run(context, next); + }; +} + +// TODO(qard): should there be a C++ channel interface? 
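+// A channel with no subscribers stays on the cheap Channel.prototype, whose
+// publish() is a no-op and whose hasSubscribers getter returns false. The
+// first subscribe() or bindStore() call runs markActive(), which swaps the
+// instance's prototype to ActiveChannel.prototype; maybeMarkInactive() swaps
+// it back once the last subscriber and bound store are gone. For example:
+//   const dc = require("node:diagnostics_channel");
+//   const ch = dc.channel("example");   // inactive, publish() does nothing
+//   ch.subscribe((message) => {});      // markActive() upgrades the channel
+//   ch.hasSubscribers;                  // true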
+class ActiveChannel { + subscribe(subscription) { + validateFunction(subscription, "subscription"); + this._subscribers = ArrayPrototypeSlice(this._subscribers); + ArrayPrototypePush(this._subscribers, subscription); + channels.incRef(this.name); + } + + unsubscribe(subscription) { + const index = ArrayPrototypeIndexOf(this._subscribers, subscription); + if (index === -1) return false; + + const before = ArrayPrototypeSlice(this._subscribers, 0, index); + const after = ArrayPrototypeSlice(this._subscribers, index + 1); + this._subscribers = before; + ArrayPrototypePushApply(this._subscribers, after); + + channels.decRef(this.name); + maybeMarkInactive(this); + + return true; + } + + bindStore(store, transform) { + const replacing = this._stores.has(store); + if (!replacing) channels.incRef(this.name); + this._stores.set(store, transform); + } + + unbindStore(store) { + if (!this._stores.has(store)) { + return false; + } + + this._stores.delete(store); + + channels.decRef(this.name); + maybeMarkInactive(this); + + return true; + } + + get hasSubscribers() { + return true; + } + + publish(data) { + const subscribers = this._subscribers; + for (let i = 0; i < (subscribers?.length || 0); i++) { + try { + const onMessage = subscribers[i]; + onMessage(data, this.name); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + } + } + } + + runStores(data, fn, thisArg, ...args) { + let run = () => { + this.publish(data); + return ReflectApply(fn, thisArg, args); + }; + + for (const entry of this._stores.entries()) { + const store = entry[0]; + const transform = entry[1]; + run = wrapStoreRun(store, data, run, transform); + } + + return run(); + } +} + +class Channel { + constructor(name) { + this._subscribers = undefined; + this._stores = undefined; + this.name = name; + + channels.set(name, this); + } + + static [SymbolHasInstance](instance) { + const prototype = ObjectGetPrototypeOf(instance); + return ( + prototype === Channel.prototype || prototype === ActiveChannel.prototype + ); + } + + subscribe(subscription) { + markActive(this); + this.subscribe(subscription); + } + + unsubscribe() { + return false; + } + + bindStore(store, transform) { + markActive(this); + this.bindStore(store, transform); + } + + unbindStore() { + return false; + } + + get hasSubscribers() { + return false; + } + + publish() {} + + runStores(data, fn, thisArg, ...args) { + return ReflectApply(fn, thisArg, args); + } +} + +const channels = new WeakRefMap(); + +function channel(name) { + const channel = channels.get(name); + if (channel) return channel; + + if (typeof name !== "string" && typeof name !== "symbol") { + throw new ERR_INVALID_ARG_TYPE("channel", ["string", "symbol"], name); + } + + return new Channel(name); +} + +function subscribe(name, subscription) { + return channel(name).subscribe(subscription); +} + +function unsubscribe(name, subscription) { + return channel(name).unsubscribe(subscription); +} + +function hasSubscribers(name) { + const channel = channels.get(name); + if (!channel) return false; + + return channel.hasSubscribers; +} + +const traceEvents = ["start", "end", "asyncStart", "asyncEnd", "error"]; + +function assertChannel(value, name) { + if (!(value instanceof Channel)) { + throw new ERR_INVALID_ARG_TYPE(name, ["Channel"], value); + } +} + +function tracingChannelFrom(nameOrChannels, name) { + if (typeof nameOrChannels === "string") { + return channel(`tracing:${nameOrChannels}:${name}`); + } + + if (typeof nameOrChannels === "object" && nameOrChannels 
!== null) { + const channel = nameOrChannels[name]; + assertChannel(channel, `nameOrChannels.${name}`); + return channel; + } + + throw new ERR_INVALID_ARG_TYPE( + "nameOrChannels", + ["string", "object", "TracingChannel"], + nameOrChannels + ); +} + +class TracingChannel { + constructor(nameOrChannels) { + for (let i = 0; i < traceEvents.length; ++i) { + const eventName = traceEvents[i]; + ObjectDefineProperty(this, eventName, { + __proto__: null, + value: tracingChannelFrom(nameOrChannels, eventName), + }); + } + } + + get hasSubscribers() { + return ( + this.start?.hasSubscribers || + this.end?.hasSubscribers || + this.asyncStart?.hasSubscribers || + this.asyncEnd?.hasSubscribers || + this.error?.hasSubscribers + ); + } + + subscribe(handlers) { + for (let i = 0; i < traceEvents.length; ++i) { + const name = traceEvents[i]; + if (!handlers[name]) continue; + + this[name]?.subscribe(handlers[name]); + } + } + + unsubscribe(handlers) { + let done = true; + + for (let i = 0; i < traceEvents.length; ++i) { + const name = traceEvents[i]; + if (!handlers[name]) continue; + + if (!this[name]?.unsubscribe(handlers[name])) { + done = false; + } + } + + return done; + } + + traceSync(fn, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, error } = this; + + return start.runStores(context, () => { + try { + const result = ReflectApply(fn, thisArg, args); + context.result = result; + return result; + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } + + tracePromise(fn, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, asyncStart, asyncEnd, error } = this; + + function reject(err) { + context.error = err; + error.publish(context); + asyncStart.publish(context); + // TODO: Is there a way to have asyncEnd _after_ the continuation? + asyncEnd.publish(context); + return PromiseReject(err); + } + + function resolve(result) { + context.result = result; + asyncStart.publish(context); + // TODO: Is there a way to have asyncEnd _after_ the continuation? 
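+      // As written, asyncStart and asyncEnd publish back to back in the tick
+      // in which the traced promise settles, before continuations chained
+      // onto the promise returned by tracePromise() run.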
+ asyncEnd.publish(context); + return result; + } + + return start.runStores(context, () => { + try { + let promise = ReflectApply(fn, thisArg, args); + // Convert thenables to native promises + if (!(promise instanceof Promise)) { + promise = PromiseResolve(promise); + } + return PromisePrototypeThen(promise, resolve, reject); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } + + traceCallback(fn, position = -1, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, asyncStart, asyncEnd, error } = this; + + function wrappedCallback(err, res) { + if (err) { + context.error = err; + error.publish(context); + } else { + context.result = res; + } + + // Using runStores here enables manual context failure recovery + asyncStart.runStores(context, () => { + try { + return ReflectApply(callback, this, arguments); + } finally { + asyncEnd.publish(context); + } + }); + } + + const callback = ArrayPrototypeAt(args, position); + validateFunction(callback, "callback"); + ArrayPrototypeSplice(args, position, 1, wrappedCallback); + + return start.runStores(context, () => { + try { + return ReflectApply(fn, thisArg, args); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } +} + +function tracingChannel(nameOrChannels) { + return new TracingChannel(nameOrChannels); +} + +module.exports = { + channel, + hasSubscribers, + subscribe, + tracingChannel, + unsubscribe, + Channel, +}; diff --git a/.codesandbox/node/dns.js b/.codesandbox/node/dns.js new file mode 100644 index 0000000000..22d23dea58 --- /dev/null +++ b/.codesandbox/node/dns.js @@ -0,0 +1,345 @@ +"use strict"; + +const { ObjectDefineProperties, ObjectDefineProperty, Symbol } = primordials; + +const cares = internalBinding("cares_wrap"); +const { isIP } = require("internal/net"); +const { customPromisifyArgs } = require("internal/util"); +const { + DNSException, + codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }, +} = require("internal/errors"); +const { + bindDefaultResolver, + setDefaultResolver, + validateHints, + getDefaultResultOrder, + setDefaultResultOrder, + errorCodes: dnsErrorCodes, + validDnsOrders, + validFamilies, +} = require("internal/dns/utils"); +const { Resolver } = require("internal/dns/callback_resolver"); +const { + NODATA, + FORMERR, + SERVFAIL, + NOTFOUND, + NOTIMP, + REFUSED, + BADQUERY, + BADNAME, + BADFAMILY, + BADRESP, + CONNREFUSED, + TIMEOUT, + EOF, + FILE, + NOMEM, + DESTRUCTION, + BADSTR, + BADFLAGS, + NONAME, + BADHINTS, + NOTINITIALIZED, + LOADIPHLPAPI, + ADDRGETNETWORKPARAMS, + CANCELLED, +} = dnsErrorCodes; +const { + validateBoolean, + validateFunction, + validateNumber, + validateOneOf, + validatePort, + validateString, +} = require("internal/validators"); + +const { + GetAddrInfoReqWrap, + GetNameInfoReqWrap, + DNS_ORDER_VERBATIM, + DNS_ORDER_IPV4_FIRST, + DNS_ORDER_IPV6_FIRST, +} = cares; + +const kPerfHooksDnsLookupContext = Symbol("kPerfHooksDnsLookupContext"); +const kPerfHooksDnsLookupServiceContext = Symbol( + "kPerfHooksDnsLookupServiceContext" +); + +const { hasObserver, startPerf, stopPerf } = require("internal/perf/observe"); + +let promises = null; // Lazy loaded + +function onlookup(err, addresses) { + if (err) { + return this.callback(new DNSException(err, "getaddrinfo", this.hostname)); + } + this.callback(null, addresses[0], this.family 
|| isIP(addresses[0])); + if (this[kPerfHooksDnsLookupContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupContext, { detail: { addresses } }); + } +} + +function onlookupall(err, addresses) { + if (err) { + return this.callback(new DNSException(err, "getaddrinfo", this.hostname)); + } + + const family = this.family; + for (let i = 0; i < addresses.length; i++) { + const addr = addresses[i]; + addresses[i] = { + address: addr, + family: family || isIP(addr), + }; + } + + this.callback(null, addresses); + if (this[kPerfHooksDnsLookupContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupContext, { detail: { addresses } }); + } +} + +// Easy DNS A/AAAA look up +// lookup(hostname, [options,] callback) +function lookup(hostname, options, callback) { + let hints = 0; + let family = 0; + let all = false; + let dnsOrder = getDefaultResultOrder(); + + // Parse arguments + if (hostname) { + validateString(hostname, "hostname"); + } + + if (typeof options === "function") { + callback = options; + family = 0; + } else if (typeof options === "number") { + validateFunction(callback, "callback"); + + validateOneOf(options, "family", validFamilies); + family = options; + } else if (options !== undefined && typeof options !== "object") { + validateFunction(arguments.length === 2 ? options : callback, "callback"); + throw new ERR_INVALID_ARG_TYPE("options", ["integer", "object"], options); + } else { + validateFunction(callback, "callback"); + + if (options?.hints != null) { + validateNumber(options.hints, "options.hints"); + hints = options.hints >>> 0; + validateHints(hints); + } + if (options?.family != null) { + switch (options.family) { + case "IPv4": + family = 4; + break; + case "IPv6": + family = 6; + break; + default: + validateOneOf(options.family, "options.family", validFamilies); + family = options.family; + break; + } + } + if (options?.all != null) { + validateBoolean(options.all, "options.all"); + all = options.all; + } + if (options?.verbatim != null) { + validateBoolean(options.verbatim, "options.verbatim"); + dnsOrder = options.verbatim ? "verbatim" : "ipv4first"; + } + if (options?.order != null) { + validateOneOf(options.order, "options.order", validDnsOrders); + dnsOrder = options.order; + } + } + + if (!hostname) { + throw new ERR_INVALID_ARG_VALUE( + "hostname", + hostname, + "must be a non-empty string" + ); + } + + const matchedFamily = isIP(hostname); + if (matchedFamily) { + if (all) { + process.nextTick(callback, null, [ + { address: hostname, family: matchedFamily }, + ]); + } else { + process.nextTick(callback, null, hostname, matchedFamily); + } + return {}; + } + + const req = new GetAddrInfoReqWrap(); + req.callback = callback; + req.family = family; + req.hostname = hostname; + req.oncomplete = all ? 
onlookupall : onlookup; + + let order = DNS_ORDER_VERBATIM; + + if (dnsOrder === "ipv4first") { + order = DNS_ORDER_IPV4_FIRST; + } else if (dnsOrder === "ipv6first") { + order = DNS_ORDER_IPV6_FIRST; + } + + const err = cares.getaddrinfo(req, hostname, family, hints, order); + if (err) { + process.nextTick(callback, new DNSException(err, "getaddrinfo", hostname)); + return {}; + } + if (hasObserver("dns")) { + const detail = { + hostname, + family, + hints, + verbatim: order === DNS_ORDER_VERBATIM, + order: dnsOrder, + }; + + startPerf(req, kPerfHooksDnsLookupContext, { + type: "dns", + name: "lookup", + detail, + }); + } + return req; +} + +ObjectDefineProperty(lookup, customPromisifyArgs, { + __proto__: null, + value: ["address", "family"], + enumerable: false, +}); + +function onlookupservice(err, hostname, service) { + if (err) + return this.callback(new DNSException(err, "getnameinfo", this.hostname)); + + this.callback(null, hostname, service); + if (this[kPerfHooksDnsLookupServiceContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupServiceContext, { + detail: { hostname, service }, + }); + } +} + +function lookupService(address, port, callback) { + if (arguments.length !== 3) + throw new ERR_MISSING_ARGS("address", "port", "callback"); + + if (isIP(address) === 0) throw new ERR_INVALID_ARG_VALUE("address", address); + + validatePort(port); + + validateFunction(callback, "callback"); + + port = +port; + + const req = new GetNameInfoReqWrap(); + req.callback = callback; + req.hostname = address; + req.port = port; + req.oncomplete = onlookupservice; + + const err = cares.getnameinfo(req, address, port); + if (err) throw new DNSException(err, "getnameinfo", address); + if (hasObserver("dns")) { + startPerf(req, kPerfHooksDnsLookupServiceContext, { + type: "dns", + name: "lookupService", + detail: { + host: address, + port, + }, + }); + } + return req; +} + +ObjectDefineProperty(lookupService, customPromisifyArgs, { + __proto__: null, + value: ["hostname", "service"], + enumerable: false, +}); + +function defaultResolverSetServers(servers) { + const resolver = new Resolver(); + + resolver.setServers(servers); + setDefaultResolver(resolver); + bindDefaultResolver(module.exports, Resolver.prototype); + + if (promises !== null) + bindDefaultResolver(promises, promises.Resolver.prototype); +} + +module.exports = { + lookup, + lookupService, + + Resolver, + getDefaultResultOrder, + setDefaultResultOrder, + setServers: defaultResolverSetServers, + + // uv_getaddrinfo flags + ADDRCONFIG: cares.AI_ADDRCONFIG, + ALL: cares.AI_ALL, + V4MAPPED: cares.AI_V4MAPPED, + + // ERROR CODES + NODATA, + FORMERR, + SERVFAIL, + NOTFOUND, + NOTIMP, + REFUSED, + BADQUERY, + BADNAME, + BADFAMILY, + BADRESP, + CONNREFUSED, + TIMEOUT, + EOF, + FILE, + NOMEM, + DESTRUCTION, + BADSTR, + BADFLAGS, + NONAME, + BADHINTS, + NOTINITIALIZED, + LOADIPHLPAPI, + ADDRGETNETWORKPARAMS, + CANCELLED, +}; + +bindDefaultResolver(module.exports, Resolver.prototype); + +ObjectDefineProperties(module.exports, { + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (promises === null) { + promises = require("internal/dns/promises"); + } + return promises; + }, + }, +}); diff --git a/.codesandbox/node/domain.js b/.codesandbox/node/domain.js new file mode 100644 index 0000000000..29aefdb9ac --- /dev/null +++ b/.codesandbox/node/domain.js @@ -0,0 +1,529 @@ +"use strict"; + +// WARNING: THIS MODULE IS PENDING DEPRECATION. 
+// +// No new pull requests targeting this module will be accepted +// unless they address existing, critical bugs. + +const { + ArrayPrototypeEvery, + ArrayPrototypeIndexOf, + ArrayPrototypeLastIndexOf, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + Error, + FunctionPrototypeCall, + ObjectDefineProperty, + Promise, + ReflectApply, + SafeMap, + SafeWeakMap, + StringPrototypeRepeat, + Symbol, +} = primordials; + +const EventEmitter = require("events"); +const { + ERR_DOMAIN_CALLBACK_NOT_AVAILABLE, + ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE, + ERR_UNHANDLED_ERROR, +} = require("internal/errors").codes; +const { createHook } = require("async_hooks"); +const { useDomainTrampoline } = require("internal/async_hooks"); + +const kWeak = Symbol("kWeak"); +const { WeakReference } = require("internal/util"); + +// Overwrite process.domain with a getter/setter that will allow for more +// effective optimizations +const _domain = [null]; +ObjectDefineProperty(process, "domain", { + __proto__: null, + enumerable: true, + get: function () { + return _domain[0]; + }, + set: function (arg) { + return (_domain[0] = arg); + }, +}); + +const vmPromises = new SafeWeakMap(); +const pairing = new SafeMap(); +const asyncHook = createHook({ + init(asyncId, type, triggerAsyncId, resource) { + if (process.domain !== null && process.domain !== undefined) { + // If this operation is created while in a domain, let's mark it + pairing.set(asyncId, process.domain[kWeak]); + // Promises from other contexts, such as with the VM module, should not + // have a domain property as it can be used to escape the sandbox. + if (type !== "PROMISE" || resource instanceof Promise) { + ObjectDefineProperty(resource, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: process.domain, + writable: true, + }); + // Because promises from other contexts don't get a domain field, + // the domain needs to be held alive another way. Stuffing it in a + // weakmap connected to the promise lifetime can fix that. + } else { + vmPromises.set(resource, process.domain); + } + } + }, + before(asyncId) { + const current = pairing.get(asyncId); + if (current !== undefined) { + // Enter domain for this cb + // We will get the domain through current.get(), because the resource + // object's .domain property makes sure it is not garbage collected. + // However, we do need to make the reference to the domain non-weak, + // so that it cannot be garbage collected before the after() hook. + current.incRef(); + current.get().enter(); + } + }, + after(asyncId) { + const current = pairing.get(asyncId); + if (current !== undefined) { + // Exit domain for this cb + const domain = current.get(); + current.decRef(); + domain.exit(); + } + }, + destroy(asyncId) { + pairing.delete(asyncId); // cleaning up + }, +}); + +// When domains are in use, they claim full ownership of the +// uncaught exception capture callback. +if (process.hasUncaughtExceptionCaptureCallback()) { + throw new ERR_DOMAIN_CALLBACK_NOT_AVAILABLE(); +} + +// Get the stack trace at the point where `domain` was required. 
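+// (Editor's note, not part of the original source: the stack captured below is
+// appended to the error thrown when user code later calls
+// process.setUncaughtExceptionCaptureCallback(), so the conflicting
+// require('domain') call site can be traced.)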
+// eslint-disable-next-line no-restricted-syntax +const domainRequireStack = new Error("require(`domain`) at this point").stack; + +const { setUncaughtExceptionCaptureCallback } = process; +process.setUncaughtExceptionCaptureCallback = function (fn) { + const err = new ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE(); + err.stack += `\n${StringPrototypeRepeat("-", 40)}\n${domainRequireStack}`; + throw err; +}; + +let sendMakeCallbackDeprecation = false; +function emitMakeCallbackDeprecation({ target, method }) { + if (!sendMakeCallbackDeprecation) { + process.emitWarning( + "Using a domain property in MakeCallback is deprecated. Use the " + + "async_context variant of MakeCallback or the AsyncResource class " + + "instead. " + + `(Triggered by calling ${method?.name || ""} ` + + `on ${target?.constructor?.name}.)`, + "DeprecationWarning", + "DEP0097" + ); + sendMakeCallbackDeprecation = true; + } +} + +function topLevelDomainCallback(cb, ...args) { + const domain = this.domain; + if (exports.active && domain) + emitMakeCallbackDeprecation({ target: this, method: cb }); + + if (domain) domain.enter(); + const ret = ReflectApply(cb, this, args); + if (domain) domain.exit(); + + return ret; +} + +// It's possible to enter one domain while already inside +// another one. The stack is each entered domain. +let stack = []; +exports._stack = stack; +useDomainTrampoline(topLevelDomainCallback); + +function updateExceptionCapture() { + if ( + ArrayPrototypeEvery(stack, (domain) => domain.listenerCount("error") === 0) + ) { + setUncaughtExceptionCaptureCallback(null); + } else { + setUncaughtExceptionCaptureCallback(null); + setUncaughtExceptionCaptureCallback((er) => { + return process.domain._errorHandler(er); + }); + } +} + +process.on("newListener", (name, listener) => { + if ( + name === "uncaughtException" && + listener !== domainUncaughtExceptionClear + ) { + // Make sure the first listener for `uncaughtException` always clears + // the domain stack. + process.removeListener(name, domainUncaughtExceptionClear); + process.prependListener(name, domainUncaughtExceptionClear); + } +}); + +process.on("removeListener", (name, listener) => { + if ( + name === "uncaughtException" && + listener !== domainUncaughtExceptionClear + ) { + // If the domain listener would be the only remaining one, remove it. + const listeners = process.listeners("uncaughtException"); + if (listeners.length === 1 && listeners[0] === domainUncaughtExceptionClear) + process.removeListener(name, domainUncaughtExceptionClear); + } +}); + +function domainUncaughtExceptionClear() { + stack.length = 0; + exports.active = process.domain = null; + updateExceptionCapture(); +} + +class Domain extends EventEmitter { + constructor() { + super(); + + this.members = []; + this[kWeak] = new WeakReference(this); + asyncHook.enable(); + + this.on("removeListener", updateExceptionCapture); + this.on("newListener", updateExceptionCapture); + } +} + +exports.Domain = Domain; + +exports.create = exports.createDomain = function createDomain() { + return new Domain(); +}; + +// The active domain is always the one that we're currently in. +exports.active = null; +Domain.prototype.members = undefined; + +// Called by process._fatalException in case an error was thrown. 
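+//
+// (Editor's illustrative sketch, not part of the original source — how this
+// handler is typically reached from user code:
+//
+//   const domain = require('domain');
+//   const d = domain.create();
+//   d.on('error', (err) => console.error('caught:', err.message));
+//   d.run(() => {
+//     process.nextTick(() => { throw new Error('boom'); });
+//   });
+//
+// The uncaught exception is routed to d's 'error' listener via the
+// _errorHandler below instead of crashing the process.)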
+Domain.prototype._errorHandler = function (er) { + let caught = false; + + if ((typeof er === "object" && er !== null) || typeof er === "function") { + ObjectDefineProperty(er, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: this, + writable: true, + }); + er.domainThrown = true; + } + // Pop all adjacent duplicates of the currently active domain from the stack. + // This is done to prevent a domain's error handler to run within the context + // of itself, and re-entering itself recursively handler as a result of an + // exception thrown in its context. + while (exports.active === this) { + this.exit(); + } + + // The top-level domain-handler is handled separately. + // + // The reason is that if V8 was passed a command line option + // asking it to abort on an uncaught exception (currently + // "--abort-on-uncaught-exception"), we want an uncaught exception + // in the top-level domain error handler to make the + // process abort. Using try/catch here would always make V8 think + // that these exceptions are caught, and thus would prevent it from + // aborting in these cases. + if (stack.length === 0) { + // If there's no error handler, do not emit an 'error' event + // as this would throw an error, make the process exit, and thus + // prevent the process 'uncaughtException' event from being emitted + // if a listener is set. + if (this.listenerCount("error") > 0) { + // Clear the uncaughtExceptionCaptureCallback so that we know that, since + // the top-level domain is not active anymore, it would be ok to abort on + // an uncaught exception at this point + setUncaughtExceptionCaptureCallback(null); + try { + caught = this.emit("error", er); + } finally { + updateExceptionCapture(); + } + } + } else { + // Wrap this in a try/catch so we don't get infinite throwing + try { + // One of three things will happen here. + // + // 1. There is a handler, caught = true + // 2. There is no handler, caught = false + // 3. It throws, caught = false + // + // If caught is false after this, then there's no need to exit() + // the domain, because we're going to crash the process anyway. + caught = this.emit("error", er); + } catch (er2) { + // The domain error handler threw! oh no! + // See if another domain can catch THIS error, + // or else crash on the original one. + updateExceptionCapture(); + if (stack.length) { + exports.active = process.domain = stack[stack.length - 1]; + caught = process.domain._errorHandler(er2); + } else { + // Pass on to the next exception handler. + throw er2; + } + } + } + + // Exit all domains on the stack. Uncaught exceptions end the + // current tick and no domains should be left on the stack + // between ticks. + domainUncaughtExceptionClear(); + + return caught; +}; + +Domain.prototype.enter = function () { + // Note that this might be a no-op, but we still need + // to push it onto the stack so that we can pop it later. + exports.active = process.domain = this; + ArrayPrototypePush(stack, this); + updateExceptionCapture(); +}; + +Domain.prototype.exit = function () { + // Don't do anything if this domain is not on the stack. + const index = ArrayPrototypeLastIndexOf(stack, this); + if (index === -1) return; + + // Exit all domains until this one. + ArrayPrototypeSplice(stack, index); + + exports.active = stack.length === 0 ? undefined : stack[stack.length - 1]; + process.domain = exports.active; + updateExceptionCapture(); +}; + +// note: this works for timers as well. 
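+//
+// (Editor's illustrative sketch, not part of the original source — emitters
+// created outside a domain can be bound to it explicitly:
+//
+//   const EventEmitter = require('events');
+//   const d = domain.create();
+//   d.on('error', (err) => console.error('caught:', err.message));
+//   const emitter = new EventEmitter();
+//   d.add(emitter);
+//   emitter.emit('error', new Error('oops')); // routed to d's 'error' listener
+//
+// This complements the implicit binding performed by the EventEmitter.init
+// override further down in this file.)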
+Domain.prototype.add = function (ee) { + // If the domain is already added, then nothing left to do. + if (ee.domain === this) return; + + // Has a domain already - remove it first. + if (ee.domain) ee.domain.remove(ee); + + // Check for circular Domain->Domain links. + // They cause big issues. + // + // For example: + // var d = domain.create(); + // var e = domain.create(); + // d.add(e); + // e.add(d); + // e.emit('error', er); // RangeError, stack overflow! + if (this.domain && ee instanceof Domain) { + for (let d = this.domain; d; d = d.domain) { + if (ee === d) return; + } + } + + ObjectDefineProperty(ee, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: this, + writable: true, + }); + ArrayPrototypePush(this.members, ee); +}; + +Domain.prototype.remove = function (ee) { + ee.domain = null; + const index = ArrayPrototypeIndexOf(this.members, ee); + if (index !== -1) ArrayPrototypeSplice(this.members, index, 1); +}; + +Domain.prototype.run = function (fn) { + this.enter(); + const ret = ReflectApply(fn, this, ArrayPrototypeSlice(arguments, 1)); + this.exit(); + + return ret; +}; + +function intercepted(_this, self, cb, fnargs) { + if (fnargs[0] && fnargs[0] instanceof Error) { + const er = fnargs[0]; + er.domainBound = cb; + er.domainThrown = false; + ObjectDefineProperty(er, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: self, + writable: true, + }); + self.emit("error", er); + return; + } + + self.enter(); + const ret = ReflectApply(cb, _this, ArrayPrototypeSlice(fnargs, 1)); + self.exit(); + + return ret; +} + +Domain.prototype.intercept = function (cb) { + const self = this; + + function runIntercepted() { + return intercepted(this, self, cb, arguments); + } + + return runIntercepted; +}; + +function bound(_this, self, cb, fnargs) { + self.enter(); + const ret = ReflectApply(cb, _this, fnargs); + self.exit(); + + return ret; +} + +Domain.prototype.bind = function (cb) { + const self = this; + + function runBound() { + return bound(this, self, cb, arguments); + } + + ObjectDefineProperty(runBound, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: this, + writable: true, + }); + + return runBound; +}; + +// Override EventEmitter methods to make it domain-aware. +EventEmitter.usingDomains = true; + +const eventInit = EventEmitter.init; +EventEmitter.init = function (opts) { + ObjectDefineProperty(this, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: null, + writable: true, + }); + if (exports.active && !(this instanceof exports.Domain)) { + this.domain = exports.active; + } + + return FunctionPrototypeCall(eventInit, this, opts); +}; + +const eventEmit = EventEmitter.prototype.emit; +EventEmitter.prototype.emit = function emit(...args) { + const domain = this.domain; + + const type = args[0]; + const shouldEmitError = type === "error" && this.listenerCount(type) > 0; + + // Just call original `emit` if current EE instance has `error` + // handler, there's no active domain or this is process + if ( + shouldEmitError || + domain === null || + domain === undefined || + this === process + ) { + return ReflectApply(eventEmit, this, args); + } + + if (type === "error") { + const er = args.length > 1 && args[1] ? 
args[1] : new ERR_UNHANDLED_ERROR(); + + if (typeof er === "object") { + er.domainEmitter = this; + ObjectDefineProperty(er, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: domain, + writable: true, + }); + er.domainThrown = false; + } + + // Remove the current domain (and its duplicates) from the domains stack and + // set the active domain to its parent (if any) so that the domain's error + // handler doesn't run in its own context. This prevents any event emitter + // created or any exception thrown in that error handler from recursively + // executing that error handler. + const origDomainsStack = ArrayPrototypeSlice(stack); + const origActiveDomain = process.domain; + + // Travel the domains stack from top to bottom to find the first domain + // instance that is not a duplicate of the current active domain. + let idx = stack.length - 1; + while (idx > -1 && process.domain === stack[idx]) { + --idx; + } + + // Change the stack to not contain the current active domain, and only the + // domains above it on the stack. + if (idx < 0) { + stack.length = 0; + } else { + ArrayPrototypeSplice(stack, idx + 1); + } + + // Change the current active domain + if (stack.length > 0) { + exports.active = process.domain = stack[stack.length - 1]; + } else { + exports.active = process.domain = null; + } + + updateExceptionCapture(); + + domain.emit("error", er); + + // Now that the domain's error handler has completed, restore the domains + // stack and the active domain to their original values. + exports._stack = stack = origDomainsStack; + exports.active = process.domain = origActiveDomain; + updateExceptionCapture(); + + return false; + } + + domain.enter(); + const ret = ReflectApply(eventEmit, this, args); + domain.exit(); + + return ret; +}; diff --git a/.codesandbox/node/events.js b/.codesandbox/node/events.js new file mode 100644 index 0000000000..fdb1605ea9 --- /dev/null +++ b/.codesandbox/node/events.js @@ -0,0 +1,1244 @@ +"use strict"; + +const { + ArrayPrototypeJoin, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + AsyncIteratorPrototype, + Boolean, + Error, + ErrorCaptureStackTrace, + FunctionPrototypeBind, + NumberMAX_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectGetPrototypeOf, + ObjectSetPrototypeOf, + Promise, + PromiseReject, + PromiseResolve, + ReflectApply, + ReflectOwnKeys, + String, + StringPrototypeSplit, + Symbol, + SymbolAsyncIterator, + SymbolDispose, + SymbolFor, +} = primordials; +const kRejection = SymbolFor("nodejs.rejection"); + +const { kEmptyObject, spliceOne } = require("internal/util"); + +const { inspect, identicalSequenceRange } = require("internal/util/inspect"); + +let FixedQueue; +let kFirstEventParam; +let kResistStopPropagation; + +const { + AbortError, + codes: { ERR_INVALID_ARG_TYPE, ERR_UNHANDLED_ERROR }, + genericNodeError, + kEnhanceStackBeforeInspector, +} = require("internal/errors"); + +const { + validateInteger, + validateAbortSignal, + validateBoolean, + validateFunction, + validateNumber, + validateObject, + validateString, +} = require("internal/validators"); +const { addAbortListener } = require("internal/events/abort_listener"); + +const kCapture = Symbol("kCapture"); +const kErrorMonitor = Symbol("events.errorMonitor"); +const kShapeMode = Symbol("shapeMode"); +const kMaxEventTargetListeners = Symbol("events.maxEventTargetListeners"); +const kMaxEventTargetListenersWarned = Symbol( + 
"events.maxEventTargetListenersWarned" +); +const kWatermarkData = SymbolFor("nodejs.watermarkData"); + +let EventEmitterAsyncResource; +// The EventEmitterAsyncResource has to be initialized lazily because event.js +// is loaded so early in the bootstrap process, before async_hooks is available. +// +// This implementation was adapted straight from addaleax's +// eventemitter-asyncresource MIT-licensed userland module. +// https://github.com/addaleax/eventemitter-asyncresource +function lazyEventEmitterAsyncResource() { + if (EventEmitterAsyncResource === undefined) { + const { AsyncResource } = require("async_hooks"); + + class EventEmitterReferencingAsyncResource extends AsyncResource { + #eventEmitter; + + /** + * @param {EventEmitter} ee + * @param {string} [type] + * @param {{ + * triggerAsyncId?: number, + * requireManualDestroy?: boolean, + * }} [options] + */ + constructor(ee, type, options) { + super(type, options); + this.#eventEmitter = ee; + } + + /** + * @type {EventEmitter} + */ + get eventEmitter() { + return this.#eventEmitter; + } + } + + EventEmitterAsyncResource = class EventEmitterAsyncResource extends ( + EventEmitter + ) { + #asyncResource; + + /** + * @param {{ + * name?: string, + * triggerAsyncId?: number, + * requireManualDestroy?: boolean, + * }} [options] + */ + constructor(options = undefined) { + let name; + if (typeof options === "string") { + name = options; + options = undefined; + } else { + if (new.target === EventEmitterAsyncResource) { + validateString(options?.name, "options.name"); + } + name = options?.name || new.target.name; + } + super(options); + + this.#asyncResource = new EventEmitterReferencingAsyncResource( + this, + name, + options + ); + } + + /** + * @param {symbol|string} event + * @param {any[]} args + * @returns {boolean} + */ + emit(event, ...args) { + const asyncResource = this.#asyncResource; + ArrayPrototypeUnshift(args, super.emit, this, event); + return ReflectApply(asyncResource.runInAsyncScope, asyncResource, args); + } + + /** + * @returns {void} + */ + emitDestroy() { + this.#asyncResource.emitDestroy(); + } + + /** + * @type {number} + */ + get asyncId() { + return this.#asyncResource.asyncId(); + } + + /** + * @type {number} + */ + get triggerAsyncId() { + return this.#asyncResource.triggerAsyncId(); + } + + /** + * @type {EventEmitterReferencingAsyncResource} + */ + get asyncResource() { + return this.#asyncResource; + } + }; + } + return EventEmitterAsyncResource; +} + +/** + * Creates a new `EventEmitter` instance. 
+ * @param {{ captureRejections?: boolean; }} [opts] + * @constructs EventEmitter + */ +function EventEmitter(opts) { + EventEmitter.init.call(this, opts); +} +module.exports = EventEmitter; +module.exports.addAbortListener = addAbortListener; +module.exports.once = once; +module.exports.on = on; +module.exports.getEventListeners = getEventListeners; +module.exports.getMaxListeners = getMaxListeners; +module.exports.listenerCount = listenerCount; +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.usingDomains = false; + +EventEmitter.captureRejectionSymbol = kRejection; +ObjectDefineProperty(EventEmitter, "captureRejections", { + __proto__: null, + get() { + return EventEmitter.prototype[kCapture]; + }, + set(value) { + validateBoolean(value, "EventEmitter.captureRejections"); + + EventEmitter.prototype[kCapture] = value; + }, + enumerable: true, +}); + +ObjectDefineProperty(EventEmitter, "EventEmitterAsyncResource", { + __proto__: null, + enumerable: true, + get: lazyEventEmitterAsyncResource, + set: undefined, + configurable: true, +}); + +EventEmitter.errorMonitor = kErrorMonitor; + +// The default for captureRejections is false +ObjectDefineProperty(EventEmitter.prototype, kCapture, { + __proto__: null, + value: false, + writable: true, + enumerable: false, +}); + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +let defaultMaxListeners = 10; +let isEventTarget; + +function checkListener(listener) { + validateFunction(listener, "listener"); +} + +ObjectDefineProperty(EventEmitter, "defaultMaxListeners", { + __proto__: null, + enumerable: true, + get: function () { + return defaultMaxListeners; + }, + set: function (arg) { + validateNumber(arg, "defaultMaxListeners", 0); + defaultMaxListeners = arg; + }, +}); + +ObjectDefineProperties(EventEmitter, { + kMaxEventTargetListeners: { + __proto__: null, + value: kMaxEventTargetListeners, + enumerable: false, + configurable: false, + writable: false, + }, + kMaxEventTargetListenersWarned: { + __proto__: null, + value: kMaxEventTargetListenersWarned, + enumerable: false, + configurable: false, + writable: false, + }, +}); + +/** + * Sets the max listeners. + * @param {number} n + * @param {EventTarget[] | EventEmitter[]} [eventTargets] + * @returns {void} + */ +EventEmitter.setMaxListeners = function ( + n = defaultMaxListeners, + ...eventTargets +) { + validateNumber(n, "setMaxListeners", 0); + if (eventTargets.length === 0) { + defaultMaxListeners = n; + } else { + if (isEventTarget === undefined) + isEventTarget = require("internal/event_target").isEventTarget; + + for (let i = 0; i < eventTargets.length; i++) { + const target = eventTargets[i]; + if (isEventTarget(target)) { + target[kMaxEventTargetListeners] = n; + target[kMaxEventTargetListenersWarned] = false; + } else if (typeof target.setMaxListeners === "function") { + target.setMaxListeners(n); + } else { + throw new ERR_INVALID_ARG_TYPE( + "eventTargets", + ["EventEmitter", "EventTarget"], + target + ); + } + } + } +}; + +// If you're updating this function definition, please also update any +// re-definitions, such as the one in the Domain module (lib/domain.js). 
+EventEmitter.init = function (opts) { + if ( + this._events === undefined || + this._events === ObjectGetPrototypeOf(this)._events + ) { + this._events = { __proto__: null }; + this._eventsCount = 0; + this[kShapeMode] = false; + } else { + this[kShapeMode] = true; + } + + this._maxListeners ||= undefined; + + if (opts?.captureRejections) { + validateBoolean(opts.captureRejections, "options.captureRejections"); + this[kCapture] = Boolean(opts.captureRejections); + } else { + // Assigning the kCapture property directly saves an expensive + // prototype lookup in a very sensitive hot path. + this[kCapture] = EventEmitter.prototype[kCapture]; + } +}; + +function addCatch(that, promise, type, args) { + if (!that[kCapture]) { + return; + } + + // Handle Promises/A+ spec, then could be a getter + // that throws on second use. + try { + const then = promise.then; + + if (typeof then === "function") { + then.call(promise, undefined, function (err) { + // The callback is called with nextTick to avoid a follow-up + // rejection from this promise. + process.nextTick(emitUnhandledRejectionOrErr, that, err, type, args); + }); + } + } catch (err) { + that.emit("error", err); + } +} + +function emitUnhandledRejectionOrErr(ee, err, type, args) { + if (typeof ee[kRejection] === "function") { + ee[kRejection](err, type, ...args); + } else { + // We have to disable the capture rejections mechanism, otherwise + // we might end up in an infinite loop. + const prev = ee[kCapture]; + + // If the error handler throws, it is not catchable and it + // will end up in 'uncaughtException'. We restore the previous + // value of kCapture in case the uncaughtException is present + // and the exception is handled. + try { + ee[kCapture] = false; + ee.emit("error", err); + } finally { + ee[kCapture] = prev; + } + } +} + +/** + * Increases the max listeners of the event emitter. + * @param {number} n + * @returns {EventEmitter} + */ +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + validateNumber(n, "setMaxListeners", 0); + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +/** + * Returns the current max listener value for the event emitter. + * @returns {number} + */ +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +function enhanceStackTrace(err, own) { + let ctorInfo = ""; + try { + const { name } = this.constructor; + if (name !== "EventEmitter") ctorInfo = ` on ${name} instance`; + } catch { + // Continue regardless of error. + } + const sep = `\nEmitted 'error' event${ctorInfo} at:\n`; + + const errStack = ArrayPrototypeSlice( + StringPrototypeSplit(err.stack, "\n"), + 1 + ); + const ownStack = ArrayPrototypeSlice( + StringPrototypeSplit(own.stack, "\n"), + 1 + ); + + const { len, offset } = identicalSequenceRange(ownStack, errStack); + if (len > 0) { + ArrayPrototypeSplice( + ownStack, + offset + 1, + len - 2, + " [... lines matching original stack trace ...]" + ); + } + + return err.stack + sep + ArrayPrototypeJoin(ownStack, "\n"); +} + +/** + * Synchronously calls each of the listeners registered + * for the event. 
+ * @param {string | symbol} type + * @param {...any} [args] + * @returns {boolean} + */ +EventEmitter.prototype.emit = function emit(type, ...args) { + let doError = type === "error"; + + const events = this._events; + if (events !== undefined) { + if (doError && events[kErrorMonitor] !== undefined) + this.emit(kErrorMonitor, ...args); + doError &&= events.error === undefined; + } else if (!doError) return false; + + // If there is no 'error' event listener then throw. + if (doError) { + let er; + if (args.length > 0) er = args[0]; + if (er instanceof Error) { + try { + const capture = {}; + ErrorCaptureStackTrace(capture, EventEmitter.prototype.emit); + ObjectDefineProperty(er, kEnhanceStackBeforeInspector, { + __proto__: null, + value: FunctionPrototypeBind(enhanceStackTrace, this, er, capture), + configurable: true, + }); + } catch { + // Continue regardless of error. + } + + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. + throw er; // Unhandled 'error' event + } + + let stringifiedEr; + try { + stringifiedEr = inspect(er); + } catch { + stringifiedEr = er; + } + + // At least give some kind of context to the user + const err = new ERR_UNHANDLED_ERROR(stringifiedEr); + err.context = er; + throw err; // Unhandled 'error' event + } + + const handler = events[type]; + + if (handler === undefined) return false; + + if (typeof handler === "function") { + const result = ReflectApply(handler, this, args); + + // We check if result is undefined first because that + // is the most common case so we do not pay any perf + // penalty + if (result !== undefined && result !== null) { + addCatch(this, result, type, args); + } + } else { + const len = handler.length; + const listeners = arrayClone(handler); + for (let i = 0; i < len; ++i) { + const result = ReflectApply(listeners[i], this, args); + + // We check if result is undefined first because that + // is the most common case so we do not pay any perf + // penalty. + // This code is duplicated because extracting it away + // would make it non-inlineable. + if (result !== undefined && result !== null) { + addCatch(this, result, type, args); + } + } + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + let m; + let events; + let existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = { __proto__: null }; + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit("newListener", type, listener.listener ?? listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. + events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === "function") { + // Adding the second element, need to change to array. + existing = events[type] = prepend + ? [listener, existing] + : [existing, listener]; + // If we've already got an array, just append. 
+ } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + const w = genericNodeError( + `Possible EventEmitter memory leak detected. ${ + existing.length + } ${String(type)} listeners ` + + `added to ${inspect(target, { + depth: -1, + })}. MaxListeners is ${m}. Use emitter.setMaxListeners() to increase limit`, + { + name: "MaxListenersExceededWarning", + emitter: target, + type: type, + count: existing.length, + } + ); + process.emitWarning(w); + } + } + + return target; +} + +/** + * Adds a listener to the event emitter. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +/** + * Adds the `listener` function to the beginning of + * the listeners array. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.prependListener = function prependListener( + type, + listener +) { + return _addListener(this, type, listener, true); +}; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) return this.listener.call(this.target); + return ReflectApply(this.listener, this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + const state = { fired: false, wrapFn: undefined, target, type, listener }; + const wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +/** + * Adds a one-time `listener` function to the event emitter. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +/** + * Adds a one-time `listener` function to the beginning of + * the listeners array. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.prependOnceListener = function prependOnceListener( + type, + listener +) { + checkListener(listener); + + this.prependListener(type, _onceWrap(this, type, listener)); + return this; +}; + +/** + * Removes the specified `listener` from the listeners array. 
+ * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.removeListener = function removeListener( + type, + listener +) { + checkListener(listener); + + const events = this._events; + if (events === undefined) return this; + + const list = events[type]; + if (list === undefined) return this; + + if (list === listener || list.listener === listener) { + this._eventsCount -= 1; + + if (this[kShapeMode]) { + events[type] = undefined; + } else if (this._eventsCount === 0) { + this._events = { __proto__: null }; + } else { + delete events[type]; + if (events.removeListener) + this.emit("removeListener", type, list.listener || listener); + } + } else if (typeof list !== "function") { + let position = -1; + + for (let i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + position = i; + break; + } + } + + if (position < 0) return this; + + if (position === 0) list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit("removeListener", type, listener); + } + + return this; +}; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +/** + * Removes all listeners from the event emitter. (Only + * removes listeners for a specific event name if specified + * as `type`). + * @param {string | symbol} [type] + * @returns {EventEmitter} + */ +EventEmitter.prototype.removeAllListeners = function removeAllListeners(type) { + const events = this._events; + if (events === undefined) return this; + + // Not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = { __proto__: null }; + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) this._events = { __proto__: null }; + else delete events[type]; + } + this[kShapeMode] = false; + return this; + } + + // Emit removeListener for all listeners on all events + if (arguments.length === 0) { + for (const key of ReflectOwnKeys(events)) { + if (key === "removeListener") continue; + this.removeAllListeners(key); + } + this.removeAllListeners("removeListener"); + this._events = { __proto__: null }; + this._eventsCount = 0; + this[kShapeMode] = false; + return this; + } + + const listeners = events[type]; + + if (typeof listeners === "function") { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (let i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; +}; + +function _listeners(target, type, unwrap) { + const events = target._events; + + if (events === undefined) return []; + + const evlistener = events[type]; + if (evlistener === undefined) return []; + + if (typeof evlistener === "function") + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener); +} + +/** + * Returns a copy of the array of listeners for the event name + * specified as `type`. + * @param {string | symbol} type + * @returns {Function[]} + */ +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +/** + * Returns a copy of the array of listeners and wrappers for + * the event name specified as `type`. 
+ * @param {string | symbol} type + * @returns {Function[]} + */ +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +/** + * Returns the number of listeners listening to event name + * specified as `type`. + * @param {string | symbol} type + * @param {Function} [listener] + * @returns {number} + */ +EventEmitter.prototype.listenerCount = function listenerCount(type, listener) { + const events = this._events; + + if (events !== undefined) { + const evlistener = events[type]; + + if (typeof evlistener === "function") { + if (listener != null) { + return listener === evlistener || listener === evlistener.listener + ? 1 + : 0; + } + + return 1; + } else if (evlistener !== undefined) { + if (listener != null) { + let matching = 0; + + for (let i = 0, l = evlistener.length; i < l; i++) { + if ( + evlistener[i] === listener || + evlistener[i].listener === listener + ) { + matching++; + } + } + + return matching; + } + + return evlistener.length; + } + } + + return 0; +}; + +/** + * Returns an array listing the events for which + * the emitter has registered listeners. + * @returns {(string | symbol)[]} + */ +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr) { + // At least since V8 8.3, this implementation is faster than the previous + // which always used a simple for-loop + switch (arr.length) { + case 2: + return [arr[0], arr[1]]; + case 3: + return [arr[0], arr[1], arr[2]]; + case 4: + return [arr[0], arr[1], arr[2], arr[3]]; + case 5: + return [arr[0], arr[1], arr[2], arr[3], arr[4]]; + case 6: + return [arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]]; + } + return ArrayPrototypeSlice(arr); +} + +function unwrapListeners(arr) { + const ret = arrayClone(arr); + for (let i = 0; i < ret.length; ++i) { + const orig = ret[i].listener; + if (typeof orig === "function") ret[i] = orig; + } + return ret; +} + +/** + * Returns a copy of the array of listeners for the event name + * specified as `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {Function[]} + */ +function getEventListeners(emitterOrTarget, type) { + // First check if EventEmitter + if (typeof emitterOrTarget.listeners === "function") { + return emitterOrTarget.listeners(type); + } + // Require event target lazily to avoid always loading it + const { isEventTarget, kEvents } = require("internal/event_target"); + if (isEventTarget(emitterOrTarget)) { + const root = emitterOrTarget[kEvents].get(type); + const listeners = []; + let handler = root?.next; + while (handler?.listener !== undefined) { + const listener = handler.listener?.deref + ? handler.listener.deref() + : handler.listener; + listeners.push(listener); + handler = handler.next; + } + return listeners; + } + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Returns the max listeners set. 
+ * @param {EventEmitter | EventTarget} emitterOrTarget + * @returns {number} + */ +function getMaxListeners(emitterOrTarget) { + if (typeof emitterOrTarget?.getMaxListeners === "function") { + return _getMaxListeners(emitterOrTarget); + } else if (typeof emitterOrTarget?.[kMaxEventTargetListeners] === "number") { + return emitterOrTarget[kMaxEventTargetListeners]; + } + + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Returns the number of registered listeners for `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {number} + */ +function listenerCount(emitterOrTarget, type) { + if (typeof emitterOrTarget.listenerCount === "function") { + return emitterOrTarget.listenerCount(type); + } + const { isEventTarget, kEvents } = require("internal/event_target"); + if (isEventTarget(emitterOrTarget)) { + return emitterOrTarget[kEvents].get(type)?.size ?? 0; + } + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Creates a `Promise` that is fulfilled when the emitter + * emits the given event. + * @param {EventEmitter} emitter + * @param {string | symbol} name + * @param {{ signal: AbortSignal; }} [options] + * @returns {Promise} + */ +async function once(emitter, name, options = kEmptyObject) { + validateObject(options, "options"); + const { signal } = options; + validateAbortSignal(signal, "options.signal"); + if (signal?.aborted) + throw new AbortError(undefined, { cause: signal.reason }); + return new Promise((resolve, reject) => { + const errorListener = (err) => { + emitter.removeListener(name, resolver); + if (signal != null) { + eventTargetAgnosticRemoveListener(signal, "abort", abortListener); + } + reject(err); + }; + const resolver = (...args) => { + if (typeof emitter.removeListener === "function") { + emitter.removeListener("error", errorListener); + } + if (signal != null) { + eventTargetAgnosticRemoveListener(signal, "abort", abortListener); + } + resolve(args); + }; + + kResistStopPropagation ??= + require("internal/event_target").kResistStopPropagation; + const opts = { + __proto__: null, + once: true, + [kResistStopPropagation]: true, + }; + eventTargetAgnosticAddListener(emitter, name, resolver, opts); + if (name !== "error" && typeof emitter.once === "function") { + // EventTarget does not have `error` event semantics like Node + // EventEmitters, we listen to `error` events only on EventEmitters. 
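+      // (Editor's note, not part of the original source: this attached
+      // 'error' listener is what makes `await once(emitter, 'foo')` reject
+      // if the emitter emits 'error' before 'foo'.)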
+ emitter.once("error", errorListener); + } + function abortListener() { + eventTargetAgnosticRemoveListener(emitter, name, resolver); + eventTargetAgnosticRemoveListener(emitter, "error", errorListener); + reject(new AbortError(undefined, { cause: signal?.reason })); + } + if (signal != null) { + eventTargetAgnosticAddListener(signal, "abort", abortListener, { + __proto__: null, + once: true, + [kResistStopPropagation]: true, + }); + } + }); +} + +function createIterResult(value, done) { + return { value, done }; +} + +function eventTargetAgnosticRemoveListener(emitter, name, listener, flags) { + if (typeof emitter.removeListener === "function") { + emitter.removeListener(name, listener); + } else if (typeof emitter.removeEventListener === "function") { + emitter.removeEventListener(name, listener, flags); + } else { + throw new ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); + } +} + +function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === "function") { + if (flags?.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === "function") { + emitter.addEventListener(name, listener, flags); + } else { + throw new ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); + } +} + +/** + * Returns an `AsyncIterator` that iterates `event` events. + * @param {EventEmitter} emitter + * @param {string | symbol} event + * @param {{ + * signal: AbortSignal; + * close?: string[]; + * highWaterMark?: number, + * lowWaterMark?: number + * }} [options] + * @returns {AsyncIterator} + */ +function on(emitter, event, options = kEmptyObject) { + // Parameters validation + validateObject(options, "options"); + const signal = options.signal; + validateAbortSignal(signal, "options.signal"); + if (signal?.aborted) + throw new AbortError(undefined, { cause: signal.reason }); + // Support both highWaterMark and highWatermark for backward compatibility + const highWatermark = + options.highWaterMark ?? options.highWatermark ?? NumberMAX_SAFE_INTEGER; + validateInteger(highWatermark, "options.highWaterMark", 1); + // Support both lowWaterMark and lowWatermark for backward compatibility + const lowWatermark = options.lowWaterMark ?? options.lowWatermark ?? 
1; + validateInteger(lowWatermark, "options.lowWaterMark", 1); + + // Preparing controlling queues and variables + FixedQueue ??= require("internal/fixed_queue"); + const unconsumedEvents = new FixedQueue(); + const unconsumedPromises = new FixedQueue(); + let paused = false; + let error = null; + let finished = false; + let size = 0; + + const iterator = ObjectSetPrototypeOf( + { + next() { + // First, we consume all unread events + if (size) { + const value = unconsumedEvents.shift(); + size--; + if (paused && size < lowWatermark) { + emitter.resume(); + paused = false; + } + return PromiseResolve(createIterResult(value, false)); + } + + // Then we error, if an error happened + // This happens one time if at all, because after 'error' + // we stop listening + if (error) { + const p = PromiseReject(error); + // Only the first element errors + error = null; + return p; + } + + // If the iterator is finished, resolve to done + if (finished) return closeHandler(); + + // Wait until an event happens + return new Promise(function (resolve, reject) { + unconsumedPromises.push({ resolve, reject }); + }); + }, + + return() { + return closeHandler(); + }, + + throw(err) { + if (!err || !(err instanceof Error)) { + throw new ERR_INVALID_ARG_TYPE( + "EventEmitter.AsyncIterator", + "Error", + err + ); + } + errorHandler(err); + }, + [SymbolAsyncIterator]() { + return this; + }, + [kWatermarkData]: { + /** + * The current queue size + * @returns {number} + */ + get size() { + return size; + }, + /** + * The low watermark. The emitter is resumed every time size is lower than it + * @returns {number} + */ + get low() { + return lowWatermark; + }, + /** + * The high watermark. The emitter is paused every time size is higher than it + * @returns {number} + */ + get high() { + return highWatermark; + }, + /** + * It checks whether the emitter is paused by the watermark controller or not + * @returns {boolean} + */ + get isPaused() { + return paused; + }, + }, + }, + AsyncIteratorPrototype + ); + + // Adding event handlers + const { addEventListener, removeAll } = listenersController(); + kFirstEventParam ??= require("internal/events/symbols").kFirstEventParam; + addEventListener( + emitter, + event, + options[kFirstEventParam] + ? eventHandler + : function (...args) { + return eventHandler(args); + } + ); + if (event !== "error" && typeof emitter.on === "function") { + addEventListener(emitter, "error", errorHandler); + } + const closeEvents = options?.close; + if (closeEvents?.length) { + for (let i = 0; i < closeEvents.length; i++) { + addEventListener(emitter, closeEvents[i], closeHandler); + } + } + + const abortListenerDisposable = signal + ? 
addAbortListener(signal, abortListener) + : null; + + return iterator; + + function abortListener() { + errorHandler(new AbortError(undefined, { cause: signal?.reason })); + } + + function eventHandler(value) { + if (unconsumedPromises.isEmpty()) { + size++; + if (!paused && size > highWatermark) { + paused = true; + emitter.pause(); + } + unconsumedEvents.push(value); + } else unconsumedPromises.shift().resolve(createIterResult(value, false)); + } + + function errorHandler(err) { + if (unconsumedPromises.isEmpty()) error = err; + else unconsumedPromises.shift().reject(err); + + closeHandler(); + } + + function closeHandler() { + abortListenerDisposable?.[SymbolDispose](); + removeAll(); + finished = true; + const doneResult = createIterResult(undefined, true); + while (!unconsumedPromises.isEmpty()) { + unconsumedPromises.shift().resolve(doneResult); + } + + return PromiseResolve(doneResult); + } +} + +function listenersController() { + const listeners = []; + + return { + addEventListener(emitter, event, handler, flags) { + eventTargetAgnosticAddListener(emitter, event, handler, flags); + ArrayPrototypePush(listeners, [emitter, event, handler, flags]); + }, + removeAll() { + while (listeners.length > 0) { + ReflectApply( + eventTargetAgnosticRemoveListener, + undefined, + ArrayPrototypePop(listeners) + ); + } + }, + }; +} diff --git a/.codesandbox/node/fs.js b/.codesandbox/node/fs.js new file mode 100644 index 0000000000..c505db7363 --- /dev/null +++ b/.codesandbox/node/fs.js @@ -0,0 +1,3397 @@ +"use strict"; + +const { + ArrayFromAsync, + ArrayPrototypePush, + BigIntPrototypeToString, + Boolean, + FunctionPrototypeCall, + MathMax, + Number, + ObjectDefineProperties, + ObjectDefineProperty, + Promise, + PromisePrototypeThen, + PromiseResolve, + ReflectApply, + SafeMap, + SafeSet, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeSlice, + SymbolDispose, + uncurryThis, +} = primordials; + +const { fs: constants } = internalBinding("constants"); +const { + S_IFIFO, + S_IFLNK, + S_IFMT, + S_IFREG, + S_IFSOCK, + F_OK, + O_WRONLY, + O_SYMLINK, +} = constants; + +const pathModule = require("path"); +const { isArrayBufferView } = require("internal/util/types"); + +const binding = internalBinding("fs"); + +const { createBlobFromFilePath } = require("internal/blob"); + +const { Buffer } = require("buffer"); +const { isBuffer: BufferIsBuffer } = Buffer; +const BufferToString = uncurryThis(Buffer.prototype.toString); +const { + AbortError, + aggregateTwoErrors, + codes: { ERR_ACCESS_DENIED, ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE }, +} = require("internal/errors"); + +const { FSReqCallback, statValues } = binding; +const { toPathIfFileURL } = require("internal/url"); +const { + customPromisifyArgs: kCustomPromisifyArgsSymbol, + getLazy, + kEmptyObject, + promisify: { custom: kCustomPromisifiedSymbol }, + SideEffectFreeRegExpPrototypeExec, + defineLazyProperties, + isWindows, + isMacOS, +} = require("internal/util"); +const { + constants: { kIoMaxLength, kMaxUserId }, + copyObject, + Dirent, + getDirent, + getDirents, + getOptions, + getValidatedFd, + getValidatedPath, + handleErrorFromBinding, + preprocessSymlinkDestination, + Stats, + getStatFsFromBinding, + getStatsFromBinding, + realpathCacheKey, + stringToFlags, + stringToSymlinkType, + toUnixTimestamp, + validateBufferArray, + validateCpOptions, + validateOffsetLengthRead, + validateOffsetLengthWrite, + validatePath, + validatePosition, + validateRmOptions, + validateRmOptionsSync, + validateRmdirOptions, + 
validateStringAfterArrayBufferView, + warnOnNonPortableTemplate, +} = require("internal/fs/utils"); +const { + CHAR_FORWARD_SLASH, + CHAR_BACKWARD_SLASH, +} = require("internal/constants"); +const { + isInt32, + parseFileMode, + validateBoolean, + validateBuffer, + validateEncoding, + validateFunction, + validateInteger, + validateObject, + validateOneOf, + validateString, + kValidateObjectAllowNullable, +} = require("internal/validators"); + +const permission = require("internal/process/permission"); + +let fs; + +// Lazy loaded +let cpFn; +let cpSyncFn; +let promises = null; +let ReadStream; +let WriteStream; +let rimraf; +let kResistStopPropagation; +let ReadFileContext; + +// These have to be separate because of how graceful-fs happens to do it's +// monkeypatching. +let FileReadStream; +let FileWriteStream; +let Utf8Stream; + +function lazyLoadUtf8Stream() { + Utf8Stream ??= require("internal/streams/fast-utf8-stream"); +} + +// Ensure that callbacks run in the global context. Only use this function +// for callbacks that are passed to the binding layer, callbacks that are +// invoked from JS already run in the proper scope. +function makeCallback(cb) { + validateFunction(cb, "cb"); + + return (...args) => ReflectApply(cb, this, args); +} + +// Special case of `makeCallback()` that is specific to async `*stat()` calls as +// an optimization, since the data passed back to the callback needs to be +// transformed anyway. +function makeStatsCallback(cb) { + validateFunction(cb, "cb"); + + return (err, stats) => { + if (err) return cb(err); + cb(err, getStatsFromBinding(stats)); + }; +} + +const isFd = isInt32; + +function isFileType(stats, fileType) { + // Use stats array directly to avoid creating an fs.Stats instance just for + // our internal use. + let mode = stats[1]; + if (typeof mode === "bigint") mode = Number(mode); + return (mode & S_IFMT) === fileType; +} + +/** + * Tests a user's permissions for the file or directory + * specified by `path`. + * @param {string | Buffer | URL} path + * @param {number} [mode] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function access(path, mode, callback) { + if (typeof mode === "function") { + callback = mode; + mode = F_OK; + } + + path = getValidatedPath(path); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.access(path, mode, req); +} + +/** + * Synchronously tests a user's permissions for the file or + * directory specified by `path`. + * @param {string | Buffer | URL} path + * @param {number} [mode] + * @returns {void} + */ +function accessSync(path, mode) { + binding.access(getValidatedPath(path), mode); +} + +/** + * Tests whether or not the given path exists. + * @param {string | Buffer | URL} path + * @param {(exists?: boolean) => any} callback + * @returns {void} + */ +function exists(path, callback) { + validateFunction(callback, "cb"); + + function suppressedCallback(err) { + callback(!err); + } + + try { + fs.access(path, F_OK, suppressedCallback); + } catch { + return callback(false); + } +} + +ObjectDefineProperty(exists, kCustomPromisifiedSymbol, { + __proto__: null, + value: function exists(path) { + // eslint-disable-line func-name-matching + return new Promise((resolve) => fs.exists(path, resolve)); + }, +}); + +let showExistsDeprecation = true; +/** + * Synchronously tests whether or not the given path exists. 
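+ * Invalid paths do not throw here; they return `false` (and emit the DEP0187
+ * deprecation warning when the argument type is invalid).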
+ * @param {string | Buffer | URL} path + * @returns {boolean} + */ +function existsSync(path) { + try { + path = getValidatedPath(path); + } catch (err) { + if (showExistsDeprecation && err?.code === "ERR_INVALID_ARG_TYPE") { + process.emitWarning( + "Passing invalid argument types to fs.existsSync is deprecated", + "DeprecationWarning", + "DEP0187" + ); + showExistsDeprecation = false; + } + return false; + } + + return binding.existsSync(path); +} + +function readFileAfterOpen(err, fd) { + const context = this.context; + + if (err) { + context.callback(err); + return; + } + + context.fd = fd; + + const req = new FSReqCallback(); + req.oncomplete = readFileAfterStat; + req.context = context; + binding.fstat(fd, false, req); +} + +function readFileAfterStat(err, stats) { + const context = this.context; + + if (err) return context.close(err); + + // TODO(BridgeAR): Check if allocating a smaller chunk is better performance + // wise, similar to the promise based version (less peak memory and chunked + // stringify operations vs multiple C++/JS boundary crossings). + const size = (context.size = isFileType(stats, S_IFREG) ? stats[8] : 0); + + if (size > kIoMaxLength) { + err = new ERR_FS_FILE_TOO_LARGE(size); + return context.close(err); + } + + try { + if (size === 0) { + // TODO(BridgeAR): If an encoding is set, use the StringDecoder to concat + // the result and reuse the buffer instead of allocating a new one. + context.buffers = []; + } else { + context.buffer = Buffer.allocUnsafeSlow(size); + } + } catch (err) { + return context.close(err); + } + context.read(); +} + +function checkAborted(signal, callback) { + if (signal?.aborted) { + callback(new AbortError(undefined, { cause: signal.reason })); + return true; + } + return false; +} + +/** + * Asynchronously reads the entire contents of a file. 
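+ * When `path` is a file descriptor, it is used directly and is not closed
+ * when reading completes; the caller retains ownership of it.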
+ * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * signal?: AbortSignal; + * } | string} [options] + * @param {( + * err?: Error, + * data?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readFile(path, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { flag: "r" }); + ReadFileContext ??= require("internal/fs/read/context"); + const context = new ReadFileContext(callback, options.encoding); + context.isUserFd = isFd(path); // File descriptor ownership + + if (options.signal) { + context.signal = options.signal; + } + if (context.isUserFd) { + process.nextTick(function tick(context) { + FunctionPrototypeCall(readFileAfterOpen, { context }, null, path); + }, context); + return; + } + + if (checkAborted(options.signal, callback)) return; + + const flagsNumber = stringToFlags(options.flag, "options.flag"); + const req = new FSReqCallback(); + req.context = context; + req.oncomplete = readFileAfterOpen; + binding.open(getValidatedPath(path), flagsNumber, 0o666, req); +} + +function tryStatSync(fd, isUserFd) { + const stats = binding.fstat(fd, false, undefined, true /* shouldNotThrow */); + if (stats === undefined && !isUserFd) { + fs.closeSync(fd); + } + return stats; +} + +function tryCreateBuffer(size, fd, isUserFd) { + let threw = true; + let buffer; + try { + if (size > kIoMaxLength) { + throw new ERR_FS_FILE_TOO_LARGE(size); + } + buffer = Buffer.allocUnsafe(size); + threw = false; + } finally { + if (threw && !isUserFd) fs.closeSync(fd); + } + return buffer; +} + +function tryReadSync(fd, isUserFd, buffer, pos, len) { + let threw = true; + let bytesRead; + try { + bytesRead = fs.readSync(fd, buffer, pos, len); + threw = false; + } finally { + if (threw && !isUserFd) fs.closeSync(fd); + } + return bytesRead; +} + +/** + * Synchronously reads the entire contents of a file. + * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * }} [options] + * @returns {string | Buffer} + */ +function readFileSync(path, options) { + options = getOptions(options, { flag: "r" }); + + if (options.encoding === "utf8" || options.encoding === "utf-8") { + if (!isInt32(path)) { + path = getValidatedPath(path); + } + return binding.readFileUtf8(path, stringToFlags(options.flag)); + } + + const isUserFd = isFd(path); // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, options.flag, 0o666); + + const stats = tryStatSync(fd, isUserFd); + const size = isFileType(stats, S_IFREG) ? stats[8] : 0; + let pos = 0; + let buffer; // Single buffer with file data + let buffers; // List for when size is unknown + + if (size === 0) { + buffers = []; + } else { + buffer = tryCreateBuffer(size, fd, isUserFd); + } + + let bytesRead; + + if (size !== 0) { + do { + bytesRead = tryReadSync(fd, isUserFd, buffer, pos, size - pos); + pos += bytesRead; + } while (bytesRead !== 0 && pos < size); + } else { + do { + // The kernel lies about many files. + // Go ahead and try to read some bytes. + buffer = Buffer.allocUnsafe(8192); + bytesRead = tryReadSync(fd, isUserFd, buffer, 0, 8192); + if (bytesRead !== 0) { + ArrayPrototypePush(buffers, buffer.slice(0, bytesRead)); + } + pos += bytesRead; + } while (bytesRead !== 0); + } + + if (!isUserFd) fs.closeSync(fd); + + if (size === 0) { + // Data was collected into the buffers list. 
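+    // `pos` now holds the total number of bytes read across all chunks, so
+    // the concatenated result below is sized exactly.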
+ buffer = Buffer.concat(buffers, pos); + } else if (pos < size) { + buffer = buffer.slice(0, pos); + } + + if (options.encoding) buffer = buffer.toString(options.encoding); + return buffer; +} + +function defaultCloseCallback(err) { + if (err != null) throw err; +} + +/** + * Closes the file descriptor. + * @param {number} fd + * @param {(err?: Error) => any} [callback] + * @returns {void} + */ +function close(fd, callback = defaultCloseCallback) { + if (callback !== defaultCloseCallback) callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.close(fd, req); +} + +/** + * Synchronously closes the file descriptor. + * @param {number} fd + * @returns {void} + */ +function closeSync(fd) { + binding.close(fd); +} + +/** + * Asynchronously opens a file. + * @param {string | Buffer | URL} path + * @param {string | number} [flags] + * @param {string | number} [mode] + * @param {( + * err?: Error, + * fd?: number + * ) => any} callback + * @returns {void} + */ +function open(path, flags, mode, callback) { + path = getValidatedPath(path); + if (arguments.length < 3) { + callback = flags; + flags = "r"; + mode = 0o666; + } else if (typeof mode === "function") { + callback = mode; + mode = 0o666; + } else { + mode = parseFileMode(mode, "mode", 0o666); + } + const flagsNumber = stringToFlags(flags); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + + binding.open(path, flagsNumber, mode, req); +} + +/** + * Synchronously opens a file. + * @param {string | Buffer | URL} path + * @param {string | number} [flags] + * @param {string | number} [mode] + * @returns {number} + */ +function openSync(path, flags, mode) { + return binding.open( + getValidatedPath(path), + stringToFlags(flags), + parseFileMode(mode, "mode", 0o666) + ); +} + +/** + * @param {string | Buffer | URL } path + * @param {{ + * type?: string; + * }} [options] + * @returns {Promise} + */ +function openAsBlob(path, options = kEmptyObject) { + validateObject(options, "options"); + const type = options.type || ""; + validateString(type, "options.type"); + // The underlying implementation here returns the Blob synchronously for now. + // To give ourselves flexibility to maybe return the Blob asynchronously, + // this API returns a Promise. + path = getValidatedPath(path); + return PromiseResolve(createBlobFromFilePath(path, { type })); +} + +/** + * Reads file from the specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView} buffer + * @param {number | { + * offset?: number; + * length?: number; + * position?: number | bigint | null; + * }} [offsetOrOptions] + * @param {number} length + * @param {number | bigint | null} position + * @param {( + * err?: Error, + * bytesRead?: number, + * buffer?: Buffer + * ) => any} callback + * @returns {void} + */ +function read(fd, buffer, offsetOrOptions, length, position, callback) { + fd = getValidatedFd(fd); + + let offset = offsetOrOptions; + let params = null; + if (arguments.length <= 4) { + if (arguments.length === 4) { + // This is fs.read(fd, buffer, options, callback) + validateObject(offsetOrOptions, "options", kValidateObjectAllowNullable); + callback = length; + params = offsetOrOptions; + } else if (arguments.length === 3) { + // This is fs.read(fd, bufferOrParams, callback) + if (!isArrayBufferView(buffer)) { + // This is fs.read(fd, params, callback) + params = buffer; + ({ buffer = Buffer.alloc(16384) } = params ?? 
kEmptyObject); + } + callback = offsetOrOptions; + } else { + // This is fs.read(fd, callback) + callback = buffer; + buffer = Buffer.alloc(16384); + } + + if (params !== undefined) { + validateObject(params, "options", kValidateObjectAllowNullable); + } + ({ + offset = 0, + length = buffer?.byteLength - offset, + position = null, + } = params ?? kEmptyObject); + } + + validateBuffer(buffer); + validateFunction(callback, "cb"); + + if (offset == null) { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + + length |= 0; + + if (length === 0) { + return process.nextTick(function tick() { + callback(null, 0, buffer); + }); + } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE( + "buffer", + buffer, + "is empty and cannot be written" + ); + } + + validateOffsetLengthRead(offset, length, buffer.byteLength); + + if (position == null) { + position = -1; + } else { + validatePosition(position, "position", length); + } + + function wrapper(err, bytesRead) { + // Retain a reference to buffer so that it can't be GC'ed too soon. + callback(err, bytesRead || 0, buffer); + } + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + + binding.read(fd, buffer, offset, length, position, req); +} + +ObjectDefineProperty(read, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesRead", "buffer"], + enumerable: false, +}); + +/** + * Synchronously reads the file from the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView} buffer + * @param {number | { + * offset?: number; + * length?: number; + * position?: number | bigint | null; + * }} [offsetOrOptions] + * @param {number} [length] + * @param {number} [position] + * @returns {number} + */ +function readSync(fd, buffer, offsetOrOptions, length, position) { + fd = getValidatedFd(fd); + + validateBuffer(buffer); + + let offset = offsetOrOptions; + if (arguments.length <= 3 || typeof offsetOrOptions === "object") { + if (offsetOrOptions !== undefined) { + validateObject(offsetOrOptions, "options", kValidateObjectAllowNullable); + } + + ({ + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = offsetOrOptions ?? kEmptyObject); + } + + if (offset === undefined) { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + + length |= 0; + + if (length === 0) { + return 0; + } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE( + "buffer", + buffer, + "is empty and cannot be written" + ); + } + + validateOffsetLengthRead(offset, length, buffer.byteLength); + + if (position == null) { + position = -1; + } else { + validatePosition(position, "position", length); + } + + return binding.read(fd, buffer, offset, length, position); +} + +/** + * Reads file from the specified `fd` (file descriptor) + * and writes to an array of `ArrayBufferView`s. 
+ * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesRead?: number, + * buffers?: ArrayBufferView[] + * ) => any} callback + * @returns {void} + */ +function readv(fd, buffers, position, callback) { + function wrapper(err, read) { + callback(err, read || 0, buffers); + } + + fd = getValidatedFd(fd); + validateBufferArray(buffers); + callback ||= position; + validateFunction(callback, "cb"); + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + + if (typeof position !== "number") position = null; + + binding.readBuffers(fd, buffers, position, req); +} + +ObjectDefineProperty(readv, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesRead", "buffers"], + enumerable: false, +}); + +/** + * Synchronously reads file from the + * specified `fd` (file descriptor) and writes to an array + * of `ArrayBufferView`s. + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @returns {number} + */ +function readvSync(fd, buffers, position) { + fd = getValidatedFd(fd); + validateBufferArray(buffers); + + if (typeof position !== "number") position = null; + + return binding.readBuffers(fd, buffers, position); +} + +/** + * Writes `buffer` to the specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView | string} buffer + * @param {number | object} [offsetOrOptions] + * @param {number} [length] + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesWritten?: number, + * buffer?: Buffer | TypedArray | DataView + * ) => any} callback + * @returns {void} + */ +function write(fd, buffer, offsetOrOptions, length, position, callback) { + function wrapper(err, written) { + // Retain a reference to buffer so that it can't be GC'ed too soon. + callback(err, written || 0, buffer); + } + + fd = getValidatedFd(fd); + + let offset = offsetOrOptions; + if (isArrayBufferView(buffer)) { + callback ||= position || length || offset; + validateFunction(callback, "cb"); + + if (typeof offset === "object") { + ({ + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = offsetOrOptions ?? kEmptyObject); + } + + if (offset == null || typeof offset === "function") { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + if (typeof length !== "number") length = buffer.byteLength - offset; + if (typeof position !== "number") position = null; + validateOffsetLengthWrite(offset, length, buffer.byteLength); + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + binding.writeBuffer(fd, buffer, offset, length, position, req); + return; + } + + validateStringAfterArrayBufferView(buffer, "buffer"); + + if (typeof position !== "function") { + if (typeof offset === "function") { + position = offset; + offset = null; + } else { + position = length; + } + length = "utf8"; + } + + const str = buffer; + validateEncoding(str, length); + callback = position; + validateFunction(callback, "cb"); + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + binding.writeString(fd, str, offset, length, req); +} + +ObjectDefineProperty(write, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesWritten", "buffer"], + enumerable: false, +}); + +/** + * Synchronously writes `buffer` to the + * specified `fd` (file descriptor). 
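+ * When `buffer` is a string, the third argument is interpreted as the file
+ * position and the fourth as the encoding.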
+ * @param {number} fd + * @param {Buffer | TypedArray | DataView | string} buffer + * @param {{ + * offset?: number; + * length?: number; + * position?: number | null; + * }} [offsetOrOptions] + * @param {number} [length] + * @param {number} [position] + * @returns {number} + */ +function writeSync(fd, buffer, offsetOrOptions, length, position) { + fd = getValidatedFd(fd); + const ctx = {}; + let result; + + let offset = offsetOrOptions; + if (isArrayBufferView(buffer)) { + if (typeof offset === "object") { + ({ + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = offsetOrOptions ?? kEmptyObject); + } + if (position === undefined) position = null; + if (offset == null) { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + if (typeof length !== "number") length = buffer.byteLength - offset; + validateOffsetLengthWrite(offset, length, buffer.byteLength); + result = binding.writeBuffer( + fd, + buffer, + offset, + length, + position, + undefined, + ctx + ); + } else { + validateStringAfterArrayBufferView(buffer, "buffer"); + validateEncoding(buffer, length); + + if (offset === undefined) offset = null; + result = binding.writeString(fd, buffer, offset, length, undefined, ctx); + } + handleErrorFromBinding(ctx); + return result; +} + +/** + * Writes an array of `ArrayBufferView`s to the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesWritten?: number, + * buffers?: ArrayBufferView[] + * ) => any} callback + * @returns {void} + */ +function writev(fd, buffers, position, callback) { + function wrapper(err, written) { + callback(err, written || 0, buffers); + } + + fd = getValidatedFd(fd); + validateBufferArray(buffers); + callback ||= position; + validateFunction(callback, "cb"); + + if (buffers.length === 0) { + process.nextTick(callback, null, 0, buffers); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + + if (typeof position !== "number") position = null; + + binding.writeBuffers(fd, buffers, position, req); +} + +ObjectDefineProperty(writev, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesWritten", "buffer"], + enumerable: false, +}); + +/** + * Synchronously writes an array of `ArrayBufferView`s + * to the specified `fd` (file descriptor). + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @returns {number} + */ +function writevSync(fd, buffers, position) { + fd = getValidatedFd(fd); + validateBufferArray(buffers); + + if (buffers.length === 0) { + return 0; + } + + if (typeof position !== "number") position = null; + + return binding.writeBuffers(fd, buffers, position); +} + +/** + * Asynchronously renames file at `oldPath` to + * the pathname provided as `newPath`. + * @param {string | Buffer | URL} oldPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rename(oldPath, newPath, callback) { + callback = makeCallback(callback); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.rename( + getValidatedPath(oldPath, "oldPath"), + getValidatedPath(newPath, "newPath"), + req + ); +} + +/** + * Synchronously renames file at `oldPath` to + * the pathname provided as `newPath`. 
+ * @param {string | Buffer | URL} oldPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function renameSync(oldPath, newPath) { + binding.rename( + getValidatedPath(oldPath, "oldPath"), + getValidatedPath(newPath, "newPath") + ); +} + +/** + * Truncates the file. + * @param {string | Buffer | URL} path + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function truncate(path, len, callback) { + if (typeof len === "function") { + callback = len; + len = 0; + } else if (len === undefined) { + len = 0; + } + + validateInteger(len, "len"); + len = MathMax(0, len); + validateFunction(callback, "cb"); + fs.open(path, "r+", (er, fd) => { + if (er) return callback(er); + const req = new FSReqCallback(); + req.oncomplete = function oncomplete(er) { + fs.close(fd, (er2) => { + callback(aggregateTwoErrors(er2, er)); + }); + }; + binding.ftruncate(fd, len, req); + }); +} + +/** + * Synchronously truncates the file. + * @param {string | Buffer | URL} path + * @param {number} [len] + * @returns {void} + */ +function truncateSync(path, len) { + if (len === undefined) { + len = 0; + } + // Allow error to be thrown, but still close fd. + const fd = fs.openSync(path, "r+"); + try { + fs.ftruncateSync(fd, len); + } finally { + fs.closeSync(fd); + } +} + +/** + * Truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function ftruncate(fd, len = 0, callback) { + if (typeof len === "function") { + callback = len; + len = 0; + } + validateInteger(len, "len"); + len = MathMax(0, len); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.ftruncate(fd, len, req); +} + +/** + * Synchronously truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @returns {void} + */ +function ftruncateSync(fd, len = 0) { + validateInteger(len, "len"); + binding.ftruncate(fd, len < 0 ? 0 : len); +} + +function lazyLoadCp() { + if (cpFn === undefined) { + ({ cpFn } = require("internal/fs/cp/cp")); + cpFn = require("util").callbackify(cpFn); + ({ cpSyncFn } = require("internal/fs/cp/cp-sync")); + } +} + +function lazyLoadRimraf() { + if (rimraf === undefined) ({ rimraf } = require("internal/fs/rimraf")); +} + +/** + * Asynchronously removes a directory. + * @param {string | Buffer | URL} path + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rmdir(path, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + + if (options?.recursive !== undefined) { + // This API previously accepted a `recursive` option that was deprecated + // and removed. However, in order to make the change more visible, we + // opted to throw an error if recursive is specified rather than removing it + // entirely. + throw new ERR_INVALID_ARG_VALUE( + "options.recursive", + options.recursive, + "is no longer supported" + ); + } + + callback = makeCallback(callback); + path = getValidatedPath(path); + + validateRmdirOptions(options); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.rmdir(path, req); +} + +/** + * Synchronously removes a directory. 
+ * @param {string | Buffer | URL} path + * @param {object} [options] + * @returns {void} + */ +function rmdirSync(path, options) { + path = getValidatedPath(path); + + if (options?.recursive !== undefined) { + throw new ERR_INVALID_ARG_VALUE( + "options.recursive", + options.recursive, + "is no longer supported" + ); + } + + validateRmdirOptions(options); + binding.rmdir(path); +} + +/** + * Asynchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rm(path, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + path = getValidatedPath(path); + + validateRmOptions(path, options, false, (err, options) => { + if (err) { + return callback(err); + } + lazyLoadRimraf(); + return rimraf(path, options, callback); + }); +} + +/** + * Synchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @returns {void} + */ +function rmSync(path, options) { + const opts = validateRmOptionsSync(path, options, false); + return binding.rmSync( + getValidatedPath(path), + opts.maxRetries, + opts.recursive, + opts.retryDelay + ); +} + +/** + * Forces all currently queued I/O operations associated + * with the file to the operating system's synchronized + * I/O completion state. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fdatasync(fd, callback) { + const req = new FSReqCallback(); + req.oncomplete = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fdatasync API is disabled when Permission Model is enabled." + ) + ); + return; + } + binding.fdatasync(fd, req); +} + +/** + * Synchronously forces all currently queued I/O operations + * associated with the file to the operating + * system's synchronized I/O completion state. + * @param {number} fd + * @returns {void} + */ +function fdatasyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fdatasync API is disabled when Permission Model is enabled." + ); + } + binding.fdatasync(fd); +} + +/** + * Requests for all data for the open file descriptor + * to be flushed to the storage device. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fsync(fd, callback) { + const req = new FSReqCallback(); + req.oncomplete = makeCallback(callback); + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fsync API is disabled when Permission Model is enabled." + ) + ); + return; + } + binding.fsync(fd, req); +} + +/** + * Synchronously requests for all data for the open + * file descriptor to be flushed to the storage device. + * @param {number} fd + * @returns {void} + */ +function fsyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fsync API is disabled when Permission Model is enabled." + ); + } + binding.fsync(fd); +} + +/** + * Asynchronously creates a directory. 
+ * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function mkdir(path, options, callback) { + let mode = 0o777; + let recursive = false; + if (typeof options === "function") { + callback = options; + } else if (typeof options === "number" || typeof options === "string") { + mode = parseFileMode(options, "mode"); + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive; + validateBoolean(recursive, "options.recursive"); + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, "options.mode"); + } + } + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.mkdir(getValidatedPath(path), mode, recursive, req); +} + +/** + * Synchronously creates a directory. + * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @returns {string | void} + */ +function mkdirSync(path, options) { + let mode = 0o777; + let recursive = false; + if (typeof options === "number" || typeof options === "string") { + mode = parseFileMode(options, "mode"); + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive; + validateBoolean(recursive, "options.recursive"); + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, "options.mode"); + } + } + + const result = binding.mkdir(getValidatedPath(path), mode, recursive); + + if (recursive) { + return result; + } +} + +/* + * An recursive algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdirRecursive(basePath, options, callback) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + }; + + let i = 0; + + function read(path) { + const req = new FSReqCallback(); + req.oncomplete = (err, result) => { + if (err) { + callback(err); + return; + } + + if (result === undefined) { + callback(null, context.readdirResults); + return; + } + + processReaddirResult({ + result, + currentPath: path, + context, + }); + + if (i < context.pathsQueue.length) { + read(context.pathsQueue[i++]); + } else { + callback(null, context.readdirResults); + } + }; + + binding.readdir(path, context.encoding, context.withFileTypes, req); + } + + read(context.pathsQueue[i++]); +} + +// Calling `readdir` with `withFileTypes=true`, the result is an array of arrays. +// The first array is the names, and the second array is the types. +// They are guaranteed to be the same length; hence, setting `length` to the length +// of the first array within the result. +const processReaddirResult = (args) => + args.context.withFileTypes ? handleDirents(args) : handleFilePaths(args); + +function handleDirents({ result, currentPath, context }) { + const { 0: names, 1: types } = result; + const { length } = names; + + for (let i = 0; i < length; i++) { + // Avoid excluding symlinks, as they are not directories. 
+ // Refs: https://github.com/nodejs/node/issues/52663 + const fullPath = pathModule.join(currentPath, names[i]); + const dirent = getDirent(currentPath, names[i], types[i]); + ArrayPrototypePush(context.readdirResults, dirent); + + if (dirent.isDirectory() || binding.internalModuleStat(fullPath) === 1) { + ArrayPrototypePush(context.pathsQueue, fullPath); + } + } +} + +function handleFilePaths({ result, currentPath, context }) { + for (let i = 0; i < result.length; i++) { + const resultPath = pathModule.join(currentPath, result[i]); + const relativeResultPath = pathModule.relative( + context.basePath, + resultPath + ); + const stat = binding.internalModuleStat(resultPath); + ArrayPrototypePush(context.readdirResults, relativeResultPath); + + if (stat === 1) { + ArrayPrototypePush(context.pathsQueue, resultPath); + } + } +} + +/** + * An iterative algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @returns {string[] | Dirent[]} + */ +function readdirSyncRecursive(basePath, options) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + }; + + function read(path) { + const readdirResult = binding.readdir( + path, + context.encoding, + context.withFileTypes + ); + + if (readdirResult === undefined) { + return; + } + + processReaddirResult({ + result: readdirResult, + currentPath: path, + context, + }); + } + + for (let i = 0; i < context.pathsQueue.length; i++) { + read(context.pathsQueue[i]); + } + + return context.readdirResults; +} + +/** + * Reads the contents of a directory. + * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdir(path, options, callback) { + callback = makeCallback(typeof options === "function" ? options : callback); + options = getOptions(options); + path = getValidatedPath(path); + if (options.recursive != null) { + validateBoolean(options.recursive, "options.recursive"); + } + + if (options.recursive) { + // Make shallow copy to prevent mutating options from affecting results + options = copyObject(options); + + readdirRecursive(path, options, callback); + return; + } + + const req = new FSReqCallback(); + if (!options.withFileTypes) { + req.oncomplete = callback; + } else { + req.oncomplete = (err, result) => { + if (err) { + callback(err); + return; + } + getDirents(path, result, callback); + }; + } + binding.readdir(path, options.encoding, !!options.withFileTypes, req); +} + +/** + * Synchronously reads the contents of a directory. 
+ * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @returns {string | Buffer[] | Dirent[]} + */ +function readdirSync(path, options) { + options = getOptions(options); + path = getValidatedPath(path); + if (options.recursive != null) { + validateBoolean(options.recursive, "options.recursive"); + } + + if (options.recursive) { + return readdirSyncRecursive(path, options); + } + + const result = binding.readdir( + path, + options.encoding, + !!options.withFileTypes + ); + + return result !== undefined && options.withFileTypes + ? getDirents(path, result) + : result; +} + +/** + * Invokes the callback with the `fs.Stats` + * for the file descriptor. + * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} [callback] + * @returns {void} + */ +function fstat(fd, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.fstat(fd, options.bigint, req); +} + +/** + * Retrieves the `fs.Stats` for the symbolic link + * referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function lstat(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + path = getValidatedPath(path); + if (permission.isEnabled() && !permission.has("fs.read", path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path; + callback( + new ERR_ACCESS_DENIED( + "Access to this API has been restricted", + "FileSystemRead", + resource + ) + ); + return; + } + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.lstat(path, options.bigint, req); +} + +/** + * Asynchronously gets the stats of a file. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function stat(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.stat(getValidatedPath(path), options.bigint, req); +} + +function statfs(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + validateFunction(callback, "cb"); + path = getValidatedPath(path); + const req = new FSReqCallback(options.bigint); + req.oncomplete = (err, stats) => { + if (err) { + return callback(err); + } + + callback(err, getStatFsFromBinding(stats)); + }; + binding.statfs(getValidatedPath(path), options.bigint, req); +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the file descriptor. 
+ * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @returns {Stats | undefined} + */ +function fstatSync(fd, options = { bigint: false }) { + const stats = binding.fstat(fd, options.bigint, undefined, false); + if (stats === undefined) { + return; + } + return getStatsFromBinding(stats); +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the symbolic link referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats | undefined} + */ +function lstatSync(path, options = { bigint: false, throwIfNoEntry: true }) { + path = getValidatedPath(path); + if (permission.isEnabled() && !permission.has("fs.read", path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path; + throw new ERR_ACCESS_DENIED( + "Access to this API has been restricted", + "FileSystemRead", + resource + ); + } + const stats = binding.lstat( + getValidatedPath(path), + options.bigint, + undefined, + options.throwIfNoEntry + ); + + if (stats === undefined) { + return; + } + return getStatsFromBinding(stats); +} + +/** + * Synchronously retrieves the `fs.Stats` + * for the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats} + */ +function statSync(path, options = { bigint: false, throwIfNoEntry: true }) { + const stats = binding.stat( + getValidatedPath(path), + options.bigint, + undefined, + options.throwIfNoEntry + ); + if (stats === undefined) { + return undefined; + } + return getStatsFromBinding(stats); +} + +function statfsSync(path, options = { bigint: false }) { + const stats = binding.statfs(getValidatedPath(path), options.bigint); + return getStatFsFromBinding(stats); +} + +/** + * Reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @param {( + * err?: Error, + * linkString?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readlink(path, options, callback) { + callback = makeCallback(typeof options === "function" ? options : callback); + options = getOptions(options); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.readlink(getValidatedPath(path), options.encoding, req); +} + +/** + * Synchronously reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @returns {string | Buffer} + */ +function readlinkSync(path, options) { + options = getOptions(options); + return binding.readlink(getValidatedPath(path), options.encoding); +} + +/** + * Creates the link called `path` pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function symlink(target, path, type, callback) { + if (callback === undefined) { + callback = makeCallback(type); + type = undefined; + } else { + validateOneOf(type, "type", ["dir", "file", "junction", null, undefined]); + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. 
+ if (permission.isEnabled() && !permission.has("fs")) { + callback( + new ERR_ACCESS_DENIED( + "fs.symlink API requires full fs.read and fs.write permissions." + ) + ); + return; + } + + target = getValidatedPath(target, "target"); + path = getValidatedPath(path); + + if (isWindows && type == null) { + let absoluteTarget; + try { + // Symlinks targets can be relative to the newly created path. + // Calculate absolute file name of the symlink target, and check + // if it is a directory. Ignore resolve error to keep symlink + // errors consistent between platforms if invalid path is + // provided. + absoluteTarget = pathModule.resolve(path, "..", target); + } catch { + // Continue regardless of error. + } + if (absoluteTarget !== undefined) { + stat(absoluteTarget, (err, stat) => { + const resolvedType = !err && stat.isDirectory() ? "dir" : "file"; + const resolvedFlags = stringToSymlinkType(resolvedType); + const destination = preprocessSymlinkDestination( + target, + resolvedType, + path + ); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.symlink(destination, path, resolvedFlags, req); + }); + return; + } + } + + const destination = preprocessSymlinkDestination(target, type, path); + + const flags = stringToSymlinkType(type); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.symlink(destination, path, flags, req); +} + +/** + * Synchronously creates the link called `path` + * pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @returns {void} + */ +function symlinkSync(target, path, type) { + validateOneOf(type, "type", ["dir", "file", "junction", null, undefined]); + if (isWindows && type == null) { + const absoluteTarget = pathModule.resolve(`${path}`, "..", `${target}`); + if (statSync(absoluteTarget, { throwIfNoEntry: false })?.isDirectory()) { + type = "dir"; + } + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has("fs")) { + throw new ERR_ACCESS_DENIED( + "fs.symlink API requires full fs.read and fs.write permissions." + ); + } + + target = getValidatedPath(target, "target"); + path = getValidatedPath(path); + + binding.symlink( + preprocessSymlinkDestination(target, type, path), + path, + stringToSymlinkType(type) + ); +} + +/** + * Creates a new link from the `existingPath` + * to the `newPath`. + * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function link(existingPath, newPath, callback) { + callback = makeCallback(callback); + + existingPath = getValidatedPath(existingPath, "existingPath"); + newPath = getValidatedPath(newPath, "newPath"); + + const req = new FSReqCallback(); + req.oncomplete = callback; + + binding.link(existingPath, newPath, req); +} + +/** + * Synchronously creates a new link from the `existingPath` + * to the `newPath`. 
+ * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function linkSync(existingPath, newPath) { + existingPath = getValidatedPath(existingPath, "existingPath"); + newPath = getValidatedPath(newPath, "newPath"); + + binding.link(existingPath, newPath); +} + +/** + * Asynchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function unlink(path, callback) { + callback = makeCallback(callback); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.unlink(getValidatedPath(path), req); +} + +/** + * Synchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @returns {void} + */ +function unlinkSync(path) { + binding.unlink(getValidatedPath(path)); +} + +/** + * Sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchmod(fd, mode, callback) { + mode = parseFileMode(mode, "mode"); + callback = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fchmod API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.fchmod(fd, mode, req); +} + +/** + * Synchronously sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @returns {void} + */ +function fchmodSync(fd, mode) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fchmod API is disabled when Permission Model is enabled." + ); + } + binding.fchmod(fd, parseFileMode(mode, "mode")); +} + +/** + * Changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchmod(path, mode, callback) { + validateFunction(callback, "cb"); + mode = parseFileMode(mode, "mode"); + fs.open(path, O_WRONLY | O_SYMLINK, (err, fd) => { + if (err) { + callback(err); + return; + } + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, (err) => { + fs.close(fd, (err2) => { + callback(aggregateTwoErrors(err2, err)); + }); + }); + }); +} + +/** + * Synchronously changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @returns {void} + */ +function lchmodSync(path, mode) { + const fd = fs.openSync(path, O_WRONLY | O_SYMLINK); + + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + try { + fs.fchmodSync(fd, mode); + } finally { + fs.closeSync(fd); + } +} + +/** + * Asynchronously changes the permissions of a file. + * @param {string | Buffer | URL} path + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chmod(path, mode, callback) { + path = getValidatedPath(path); + mode = parseFileMode(mode, "mode"); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.chmod(path, mode, req); +} + +/** + * Synchronously changes the permissions of a file. 
+ * @param {string | Buffer | URL} path + * @param {string | number} mode + * @returns {void} + */ +function chmodSync(path, mode) { + path = getValidatedPath(path); + mode = parseFileMode(mode, "mode"); + + binding.chmod(path, mode); +} + +/** + * Sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchown(path, uid, gid, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.lchown(path, uid, gid, req); +} + +/** + * Synchronously sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function lchownSync(path, uid, gid) { + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + binding.lchown(path, uid, gid); +} + +/** + * Sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchown(fd, uid, gid, callback) { + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + callback = makeCallback(callback); + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fchown API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.fchown(fd, uid, gid, req); +} + +/** + * Synchronously sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function fchownSync(fd, uid, gid) { + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fchown API is disabled when Permission Model is enabled." + ); + } + + binding.fchown(fd, uid, gid); +} + +/** + * Asynchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chown(path, uid, gid, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.chown(path, uid, gid, req); +} + +/** + * Synchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function chownSync(path, uid, gid) { + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + binding.chown(path, uid, gid); +} + +/** + * Changes the file system timestamps of the object + * referenced by `path`. 
+ * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function utimes(path, atime, mtime, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.utimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req); +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by `path`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function utimesSync(path, atime, mtime) { + binding.utimes( + getValidatedPath(path), + toUnixTimestamp(atime), + toUnixTimestamp(mtime) + ); +} + +/** + * Changes the file system timestamps of the object + * referenced by the supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function futimes(fd, atime, mtime, callback) { + atime = toUnixTimestamp(atime, "atime"); + mtime = toUnixTimestamp(mtime, "mtime"); + callback = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "futimes API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.futimes(fd, atime, mtime, req); +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by the + * supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function futimesSync(fd, atime, mtime) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "futimes API is disabled when Permission Model is enabled." + ); + } + + binding.futimes( + fd, + toUnixTimestamp(atime, "atime"), + toUnixTimestamp(mtime, "mtime") + ); +} + +/** + * Changes the access and modification times of + * a file in the same way as `fs.utimes()`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lutimes(path, atime, mtime, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.lutimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req); +} + +/** + * Synchronously changes the access and modification + * times of a file in the same way as `fs.utimesSync()`. 
+ * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function lutimesSync(path, atime, mtime) { + binding.lutimes( + getValidatedPath(path), + toUnixTimestamp(atime), + toUnixTimestamp(mtime) + ); +} + +function writeAll( + fd, + isUserFd, + buffer, + offset, + length, + signal, + flush, + callback +) { + if (signal?.aborted) { + const abortError = new AbortError(undefined, { cause: signal.reason }); + if (isUserFd) { + callback(abortError); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, abortError)); + }); + } + return; + } + // write(fd, buffer, offset, length, position, callback) + fs.write(fd, buffer, offset, length, null, (writeErr, written) => { + if (writeErr) { + if (isUserFd) { + callback(writeErr); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, writeErr)); + }); + } + } else if (written === length) { + if (!flush) { + if (isUserFd) { + callback(null); + } else { + fs.close(fd, callback); + } + } else { + fs.fsync(fd, (syncErr) => { + if (syncErr) { + if (isUserFd) { + callback(syncErr); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, syncErr)); + }); + } + } else if (isUserFd) { + callback(null); + } else { + fs.close(fd, callback); + } + }); + } + } else { + offset += written; + length -= written; + writeAll(fd, isUserFd, buffer, offset, length, signal, flush, callback); + } + }); +} + +/** + * Asynchronously writes data to the file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer | TypedArray | DataView} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * signal?: AbortSignal; + * flush?: boolean; + * } | string} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function writeFile(path, data, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { + encoding: "utf8", + mode: 0o666, + flag: "w", + flush: false, + }); + const flag = options.flag || "w"; + const flush = options.flush ?? false; + + validateBoolean(flush, "options.flush"); + + if (!isArrayBufferView(data)) { + validateStringAfterArrayBufferView(data, "data"); + data = Buffer.from(data, options.encoding || "utf8"); + } + + if (isFd(path)) { + const isUserFd = true; + const signal = options.signal; + writeAll(path, isUserFd, data, 0, data.byteLength, signal, flush, callback); + return; + } + + if (checkAborted(options.signal, callback)) return; + + fs.open(path, flag, options.mode, (openErr, fd) => { + if (openErr) { + callback(openErr); + } else { + const isUserFd = false; + const signal = options.signal; + writeAll(fd, isUserFd, data, 0, data.byteLength, signal, flush, callback); + } + }); +} + +/** + * Synchronously writes data to the file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer | TypedArray | DataView} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * flush?: boolean; + * } | string} [options] + * @returns {void} + */ +function writeFileSync(path, data, options) { + options = getOptions(options, { + encoding: "utf8", + mode: 0o666, + flag: "w", + flush: false, + }); + + const flush = options.flush ?? 
false; + + validateBoolean(flush, "options.flush"); + + const flag = options.flag || "w"; + + // C++ fast path for string data and UTF8 encoding + if ( + typeof data === "string" && + (options.encoding === "utf8" || options.encoding === "utf-8") + ) { + if (!isInt32(path)) { + path = getValidatedPath(path); + } + + return binding.writeFileUtf8( + path, + data, + stringToFlags(flag), + parseFileMode(options.mode, "mode", 0o666) + ); + } + + if (!isArrayBufferView(data)) { + validateStringAfterArrayBufferView(data, "data"); + data = Buffer.from(data, options.encoding || "utf8"); + } + + const isUserFd = isFd(path); // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, flag, options.mode); + + let offset = 0; + let length = data.byteLength; + try { + while (length > 0) { + const written = fs.writeSync(fd, data, offset, length); + offset += written; + length -= written; + } + + if (flush) { + fs.fsyncSync(fd); + } + } finally { + if (!isUserFd) fs.closeSync(fd); + } +} + +/** + * Asynchronously appends data to a file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * flush?: boolean; + * } | string} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function appendFile(path, data, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { encoding: "utf8", mode: 0o666, flag: "a" }); + + // Don't make changes directly on options object + options = copyObject(options); + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = "a"; + + fs.writeFile(path, data, options, callback); +} + +/** + * Synchronously appends data to a file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * } | string} [options] + * @returns {void} + */ +function appendFileSync(path, data, options) { + options = getOptions(options, { encoding: "utf8", mode: 0o666, flag: "a" }); + + // Don't make changes directly on options object + options = copyObject(options); + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = "a"; + + fs.writeFileSync(path, data, options); +} + +/** + * Watches for the changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {string | { + * persistent?: boolean; + * recursive?: boolean; + * encoding?: string; + * signal?: AbortSignal; + * }} [options] + * @param {( + * eventType?: string, + * filename?: string | Buffer + * ) => any} [listener] + * @returns {watchers.FSWatcher} + */ +function watch(filename, options, listener) { + if (typeof options === "function") { + listener = options; + } + options = getOptions(options); + + // Don't make changes directly on options object + options = copyObject(options); + + if (options.persistent === undefined) options.persistent = true; + if (options.recursive === undefined) options.recursive = false; + + let watcher; + const watchers = require("internal/fs/watchers"); + const path = getValidatedPath(filename); + // TODO(anonrig): Remove non-native watcher when/if libuv supports recursive. + // As of November 2022, libuv does not support recursive file watch on all platforms, + // e.g. Linux due to the limitations of inotify. 
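Callers never see this platform branching: `fs.watch()` with `recursive: true` behaves the same whether the native watcher or the JS fallback chosen in the branch below is used, and the `signal` option is the supported way to stop watching. A minimal usage sketch (the watched directory path is hypothetical):

```js
const fs = require('node:fs');

// Hypothetical directory; recursive: true works on every platform,
// falling back to the JS watcher where libuv lacks native support.
const ac = new AbortController();
const watcher = fs.watch('/tmp/project', { recursive: true, signal: ac.signal },
  (eventType, filename) => {
    console.log(`${eventType}: ${filename}`);
  });

watcher.on('error', (err) => console.error('watch failed:', err));

// Later: aborting the signal closes the watcher, as wired up below.
setTimeout(() => ac.abort(), 10_000);
```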
+ if (options.recursive && !isMacOS && !isWindows) { + const nonNativeWatcher = require("internal/fs/recursive_watch"); + watcher = new nonNativeWatcher.FSWatcher(options); + watcher[watchers.kFSWatchStart](path); + } else { + watcher = new watchers.FSWatcher(); + watcher[watchers.kFSWatchStart]( + path, + options.persistent, + options.recursive, + options.encoding + ); + } + + if (listener) { + watcher.addListener("change", listener); + } + if (options.signal) { + if (options.signal.aborted) { + process.nextTick(() => watcher.close()); + } else { + const listener = () => watcher.close(); + kResistStopPropagation ??= + require("internal/event_target").kResistStopPropagation; + options.signal.addEventListener("abort", listener, { + __proto__: null, + [kResistStopPropagation]: true, + }); + watcher.once("close", () => { + options.signal.removeEventListener("abort", listener); + }); + } + } + + return watcher; +} + +const statWatchers = new SafeMap(); + +/** + * Watches for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {{ + * bigint?: boolean; + * persistent?: boolean; + * interval?: number; + * }} [options] + * @param {( + * current?: Stats, + * previous?: Stats + * ) => any} listener + * @returns {watchers.StatWatcher} + */ +function watchFile(filename, options, listener) { + filename = getValidatedPath(filename); + filename = pathModule.resolve(filename); + let stat; + + if (options === null || typeof options !== "object") { + listener = options; + options = null; + } + + options = { + // Poll interval in milliseconds. 5007 is what libev used to use. It's + // a little on the slow side but let's stick with it for now to keep + // behavioral changes to a minimum. + interval: 5007, + persistent: true, + ...options, + }; + + validateFunction(listener, "listener"); + + stat = statWatchers.get(filename); + const watchers = require("internal/fs/watchers"); + if (stat === undefined) { + stat = new watchers.StatWatcher(options.bigint); + stat[watchers.kFSStatWatcherStart]( + filename, + options.persistent, + options.interval + ); + statWatchers.set(filename, stat); + } else { + stat[watchers.kFSStatWatcherAddOrCleanRef]("add"); + } + + stat.addListener("change", listener); + return stat; +} + +/** + * Stops watching for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {() => any} [listener] + * @returns {void} + */ +function unwatchFile(filename, listener) { + filename = getValidatedPath(filename); + filename = pathModule.resolve(filename); + const stat = statWatchers.get(filename); + + if (stat === undefined) return; + const watchers = require("internal/fs/watchers"); + if (typeof listener === "function") { + const beforeListenerCount = stat.listenerCount("change"); + stat.removeListener("change", listener); + if (stat.listenerCount("change") < beforeListenerCount) + stat[watchers.kFSStatWatcherAddOrCleanRef]("clean"); + } else { + stat.removeAllListeners("change"); + stat[watchers.kFSStatWatcherAddOrCleanRef]("cleanAll"); + } + + if (stat.listenerCount("change") === 0) { + stat.stop(); + statWatchers.delete(filename); + } +} + +let splitRoot; +if (isWindows) { + // Regex to find the device root on Windows (e.g. 'c:\\'), including trailing + // slash. 
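To make the behaviour of the regex defined next concrete, here is a stand-alone sketch; the local `splitRootRe` copy mirrors the one below and the inputs are purely illustrative:

```js
// Local copy of the Windows root-matching regex, for illustration only.
const splitRootRe = /^(?:[a-zA-Z]:|[\\/]{2}[^\\/]+[\\/][^\\/]+)?[\\/]*/;

// Drive root, including the trailing slash.
console.log(splitRootRe.exec('c:\\foo\\bar')[0]);        // 'c:\'
// UNC root: \\server\share plus trailing slash.
console.log(splitRootRe.exec('\\\\srv\\share\\dir')[0]); // '\\srv\share\'
// Rooted but drive-less path.
console.log(splitRootRe.exec('\\windows')[0]);           // '\'
// Relative path: no root, empty match.
console.log(splitRootRe.exec('foo\\bar')[0]);            // ''
```

Because the whole pattern can match the empty string, `exec()` never returns `null` here, which is why `splitRoot()` can index `[0]` unconditionally.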
+ const splitRootRe = /^(?:[a-zA-Z]:|[\\/]{2}[^\\/]+[\\/][^\\/]+)?[\\/]*/; + splitRoot = function splitRoot(str) { + return SideEffectFreeRegExpPrototypeExec(splitRootRe, str)[0]; + }; +} else { + splitRoot = function splitRoot(str) { + for (let i = 0; i < str.length; ++i) { + if (StringPrototypeCharCodeAt(str, i) !== CHAR_FORWARD_SLASH) + return StringPrototypeSlice(str, 0, i); + } + return str; + }; +} + +function encodeRealpathResult(result, options) { + if (!options || !options.encoding || options.encoding === "utf8") + return result; + const asBuffer = Buffer.from(result); + if (options.encoding === "buffer") { + return asBuffer; + } + return asBuffer.toString(options.encoding); +} + +// Finds the next portion of a (partial) path, up to the next path delimiter +let nextPart; +if (isWindows) { + nextPart = function nextPart(p, i) { + for (; i < p.length; ++i) { + const ch = StringPrototypeCharCodeAt(p, i); + + // Check for a separator character + if (ch === CHAR_BACKWARD_SLASH || ch === CHAR_FORWARD_SLASH) return i; + } + return -1; + }; +} else { + nextPart = function nextPart(p, i) { + return StringPrototypeIndexOf(p, "/", i); + }; +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string | null; }} [options] + * @returns {string | Buffer} + */ +function realpathSync(p, options) { + options = getOptions(options); + p = toPathIfFileURL(p); + if (typeof p !== "string") { + p += ""; + } + validatePath(p); + p = pathModule.resolve(p); + + const cache = options[realpathCacheKey]; + const maybeCachedResult = cache?.get(p); + if (maybeCachedResult) { + return maybeCachedResult; + } + + const seenLinks = new SafeMap(); + const knownHard = new SafeSet(); + const original = p; + + // Current character position in p + let pos; + // The partial path so far, including a trailing slash if any + let current; + // The partial path without a trailing slash (except when pointing at a root) + let base; + // The partial path scanned in the previous round, with slash + let previous; + + // Skip over roots + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows) { + const out = binding.lstat( + base, + false, + undefined, + true /* throwIfNoEntry */ + ); + if (out === undefined) { + return; + } + knownHard.add(base); + } + + // Walk down the path, swapping out linked path parts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + const result = nextPart(p, pos); + previous = current; + if (result === -1) { + const last = StringPrototypeSlice(p, pos); + current += last; + base = previous + last; + pos = p.length; + } else { + current += StringPrototypeSlice(p, pos, result + 1); + base = previous + StringPrototypeSlice(p, pos, result); + pos = result + 1; + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base) || cache?.get(base) === base) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + break; + } + continue; + } + + let resolvedLink; + const maybeCachedResolved = cache?.get(base); + if (maybeCachedResolved) { + resolvedLink = maybeCachedResolved; + } else { + // Use stats array directly to avoid creating an fs.Stats instance just + // for our internal use. 
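The internal code that follows reads `dev` and `ino` straight out of the raw bigint stats array (indices 0 and 7) to key the `seenLinks` cache. The same key can be built with the public API; a hedged sketch (the symlink path is hypothetical):

```js
const fs = require('node:fs');

// Hypothetical symlink path; bigint stats avoid precision loss on large
// device/inode numbers, matching what realpathSync relies on internally.
const st = fs.lstatSync('/tmp/some-link', { bigint: true });
const id = `${st.dev.toString(32)}:${st.ino.toString(32)}`;
console.log('seenLinks key would be', id);
```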
+ + const stats = binding.lstat( + base, + true, + undefined, + true /* throwIfNoEntry */ + ); + if (stats === undefined) { + return; + } + + if (!isFileType(stats, S_IFLNK)) { + knownHard.add(base); + cache?.set(base, base); + continue; + } + + // Read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + let linkTarget = null; + let id; + if (!isWindows) { + const dev = BigIntPrototypeToString(stats[0], 32); + const ino = BigIntPrototypeToString(stats[7], 32); + id = `${dev}:${ino}`; + if (seenLinks.has(id)) { + linkTarget = seenLinks.get(id); + } + } + if (linkTarget === null) { + binding.stat(base, false, undefined, true); + linkTarget = binding.readlink(base, undefined); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + + cache?.set(base, resolvedLink); + if (!isWindows) seenLinks.set(id, linkTarget); + } + + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)); + + // Skip over roots + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + const out = binding.lstat( + base, + false, + undefined, + true /* throwIfNoEntry */ + ); + if (out === undefined) { + return; + } + knownHard.add(base); + } + } + + cache?.set(original, p); + return encodeRealpathResult(p, options); +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @returns {string | Buffer} + */ +realpathSync.native = (path, options) => { + options = getOptions(options); + return binding.realpath(getValidatedPath(path), options.encoding); +}; + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function realpath(p, options, callback) { + if (typeof options === "function") { + callback = options; + } else { + validateFunction(callback, "cb"); + } + options = getOptions(options); + p = toPathIfFileURL(p); + + if (typeof p !== "string") { + p += ""; + } + validatePath(p); + p = pathModule.resolve(p); + + const seenLinks = new SafeMap(); + const knownHard = new SafeSet(); + + // Current character position in p + let pos; + // The partial path so far, including a trailing slash if any + let current; + // The partial path without a trailing slash (except when pointing at a root) + let base; + // The partial path scanned in the previous round, with slash + let previous; + + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. 
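From a caller's point of view, the JavaScript walk implemented here and the libuv-backed `.native` variants return the same canonical path; a small usage sketch (the temp-dir and symlink layout is hypothetical):

```js
const fs = require('node:fs');
const os = require('node:os');
const path = require('node:path');

// Hypothetical layout: a temp dir containing a real file and a symlink to it.
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'rp-'));
const target = path.join(dir, 'target.txt');
fs.writeFileSync(target, 'hi');
fs.symlinkSync(target, path.join(dir, 'link.txt'));

console.log(fs.realpathSync(path.join(dir, 'link.txt')));        // resolves to target.txt
console.log(fs.realpathSync.native(path.join(dir, 'link.txt'))); // same result, resolved by libuv

fs.realpath(path.join(dir, 'link.txt'), (err, resolved) => {
  if (err) throw err;
  console.log(resolved); // same path, resolved asynchronously
});
```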
+ if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err); + knownHard.add(base); + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + + // Walk down the path, swapping out linked path parts for their real + // values + function LOOP() { + // Stop if scanned past end of path + if (pos >= p.length) { + return callback(null, encodeRealpathResult(p, options)); + } + + // find the next part + const result = nextPart(p, pos); + previous = current; + if (result === -1) { + const last = StringPrototypeSlice(p, pos); + current += last; + base = previous + last; + pos = p.length; + } else { + current += StringPrototypeSlice(p, pos, result + 1); + base = previous + StringPrototypeSlice(p, pos, result); + pos = result + 1; + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base)) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + return callback(null, encodeRealpathResult(p, options)); + } + return process.nextTick(LOOP); + } + + return fs.lstat(base, { bigint: true }, gotStat); + } + + function gotStat(err, stats) { + if (err) return callback(err); + + // If not a symlink, skip to the next path part + if (!stats.isSymbolicLink()) { + knownHard.add(base); + return process.nextTick(LOOP); + } + + // Stat & read the link if not read before. + // Call `gotTarget()` as soon as the link target is known. + // `dev`/`ino` always return 0 on windows, so skip the check. + let id; + if (!isWindows) { + const dev = BigIntPrototypeToString(stats.dev, 32); + const ino = BigIntPrototypeToString(stats.ino, 32); + id = `${dev}:${ino}`; + if (seenLinks.has(id)) { + return gotTarget(null, seenLinks.get(id)); + } + } + fs.stat(base, (err) => { + if (err) return callback(err); + + fs.readlink(base, (err, target) => { + if (!isWindows) seenLinks.set(id, target); + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target) { + if (err) return callback(err); + + gotResolvedLink(pathModule.resolve(previous, target)); + } + + function gotResolvedLink(resolvedLink) { + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)); + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err); + knownHard.add(base); + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } +} + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +realpath.native = (path, options, callback) => { + callback = makeCallback(callback || options); + options = getOptions(options); + path = getValidatedPath(path); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.realpath(path, options.encoding, req); +}; + +/** + * Creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * directory?: string + * ) => any} callback + * @returns {void} + */ +function mkdtemp(prefix, options, callback) { + callback = makeCallback(typeof options === "function" ? 
options : callback); + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.mkdtemp(prefix, options.encoding, req); +} + +/** + * Synchronously creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {string} + */ +function mkdtempSync(prefix, options) { + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + return binding.mkdtemp(prefix, options.encoding); +} + +/** + * Synchronously creates a unique temporary directory. + * The returned value is a disposable object which removes the + * directory and its contents when disposed. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {object} A disposable object with a "path" property. + */ +function mkdtempDisposableSync(prefix, options) { + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + + const path = binding.mkdtemp(prefix, options.encoding); + // Stash the full path in case of process.chdir() + const fullPath = pathModule.resolve(process.cwd(), path); + + const remove = () => { + binding.rmSync( + fullPath, + 0 /* maxRetries */, + true /* recursive */, + 100 /* retryDelay */ + ); + }; + return { + path, + remove, + [SymbolDispose]() { + remove(); + }, + }; +} + +/** + * Asynchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. + * @param {string | Buffer | URL} src + * @param {string | Buffer | URL} dest + * @param {number} [mode] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function copyFile(src, dest, mode, callback) { + if (typeof mode === "function") { + callback = mode; + mode = 0; + } + + src = getValidatedPath(src, "src"); + dest = getValidatedPath(dest, "dest"); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.copyFile(src, dest, mode, req); +} + +/** + * Synchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. + * @param {string | Buffer | URL} src + * @param {string | Buffer | URL} dest + * @param {number} [mode] + * @returns {void} + */ +function copyFileSync(src, dest, mode) { + binding.copyFile( + getValidatedPath(src, "src"), + getValidatedPath(dest, "dest"), + mode + ); +} + +/** + * Asynchronously copies `src` to `dest`. `src` can be a file, directory, or + * symlink. The contents of directories will be copied recursively. + * @param {string | URL} src + * @param {string | URL} dest + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function cp(src, dest, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + callback = makeCallback(callback); + options = validateCpOptions(options); + src = getValidatedPath(src, "src"); + dest = getValidatedPath(dest, "dest"); + lazyLoadCp(); + cpFn(src, dest, options, callback); +} + +/** + * Synchronously copies `src` to `dest`. `src` can be a file, directory, or + * symlink. The contents of directories will be copied recursively. 
+ * @param {string | URL} src + * @param {string | URL} dest + * @param {object} [options] + * @returns {void} + */ +function cpSync(src, dest, options) { + options = validateCpOptions(options); + src = getValidatedPath(src, "src"); + dest = getValidatedPath(dest, "dest"); + lazyLoadCp(); + cpSyncFn(src, dest, options); +} + +function lazyLoadStreams() { + if (!ReadStream) { + ({ ReadStream, WriteStream } = require("internal/fs/streams")); + FileReadStream = ReadStream; + FileWriteStream = WriteStream; + } +} + +/** + * Creates a readable stream with a default `highWaterMark` + * of 64 KiB. + * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * end?: number; + * highWaterMark?: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * }} [options] + * @returns {ReadStream} + */ +function createReadStream(path, options) { + lazyLoadStreams(); + return new ReadStream(path, options); +} + +/** + * Creates a write stream. + * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * highWaterMark?: number; + * flush?: boolean; + * }} [options] + * @returns {WriteStream} + */ +function createWriteStream(path, options) { + lazyLoadStreams(); + return new WriteStream(path, options); +} + +const lazyGlob = getLazy(() => require("internal/fs/glob").Glob); + +function glob(pattern, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + callback = makeCallback(callback); + + const Glob = lazyGlob(); + PromisePrototypeThen( + ArrayFromAsync(new Glob(pattern, options).glob()), + (res) => callback(null, res), + callback + ); +} + +function globSync(pattern, options) { + const Glob = lazyGlob(); + return new Glob(pattern, options).globSync(); +} + +module.exports = fs = { + appendFile, + appendFileSync, + access, + accessSync, + chown, + chownSync, + chmod, + chmodSync, + close, + closeSync, + copyFile, + copyFileSync, + cp, + cpSync, + createReadStream, + createWriteStream, + exists, + existsSync, + fchown, + fchownSync, + fchmod, + fchmodSync, + fdatasync, + fdatasyncSync, + fstat, + fstatSync, + fsync, + fsyncSync, + ftruncate, + ftruncateSync, + futimes, + futimesSync, + glob, + globSync, + lchown, + lchownSync, + lchmod: constants.O_SYMLINK !== undefined ? lchmod : undefined, + lchmodSync: constants.O_SYMLINK !== undefined ? 
lchmodSync : undefined, + link, + linkSync, + lstat, + lstatSync, + lutimes, + lutimesSync, + mkdir, + mkdirSync, + mkdtemp, + mkdtempSync, + mkdtempDisposableSync, + open, + openSync, + openAsBlob, + readdir, + readdirSync, + read, + readSync, + readv, + readvSync, + readFile, + readFileSync, + readlink, + readlinkSync, + realpath, + realpathSync, + rename, + renameSync, + rm, + rmSync, + rmdir, + rmdirSync, + stat, + statfs, + statSync, + statfsSync, + symlink, + symlinkSync, + truncate, + truncateSync, + unwatchFile, + unlink, + unlinkSync, + utimes, + utimesSync, + watch, + watchFile, + writeFile, + writeFileSync, + write, + writeSync, + writev, + writevSync, + Dirent, + Stats, + + get ReadStream() { + lazyLoadStreams(); + return ReadStream; + }, + + set ReadStream(val) { + ReadStream = val; + }, + + get WriteStream() { + lazyLoadStreams(); + return WriteStream; + }, + + set WriteStream(val) { + WriteStream = val; + }, + + // Legacy names... these have to be separate because of how graceful-fs + // (and possibly other) modules monkey patch the values. + get FileReadStream() { + lazyLoadStreams(); + return FileReadStream; + }, + + set FileReadStream(val) { + FileReadStream = val; + }, + + get FileWriteStream() { + lazyLoadStreams(); + return FileWriteStream; + }, + + set FileWriteStream(val) { + FileWriteStream = val; + }, + + get Utf8Stream() { + lazyLoadUtf8Stream(); + return Utf8Stream; + }, + + // For tests + _toUnixTimestamp: toUnixTimestamp, +}; + +defineLazyProperties(fs, "internal/fs/dir", ["Dir", "opendir", "opendirSync"]); + +ObjectDefineProperties(fs, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + promises ??= require("internal/fs/promises").exports; + return promises; + }, + }, +}); diff --git a/.codesandbox/node/hello-world.js b/.codesandbox/node/hello-world.js new file mode 100644 index 0000000000..857229108d --- /dev/null +++ b/.codesandbox/node/hello-world.js @@ -0,0 +1,14 @@ +const http = require('node:http'); + +const hostname = '127.0.0.1'; +const port = 3000; + +const server = http.createServer((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end('Hello, World!\n'); +}); + +server.listen(port, hostname, () => { + console.log(`Server running at http://${hostname}:${port}/`); +}); \ No newline at end of file diff --git a/.codesandbox/node/https.js b/.codesandbox/node/https.js new file mode 100644 index 0000000000..05fb02fe4f --- /dev/null +++ b/.codesandbox/node/https.js @@ -0,0 +1,660 @@ +'use strict'; + +const { + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + FunctionPrototypeCall, + JSONStringify, + NumberParseInt, + ObjectAssign, + ObjectSetPrototypeOf, + ReflectApply, + ReflectConstruct, + SymbolAsyncDispose, +} = primordials; + +const { + assertCrypto, + kEmptyObject, + promisify, + once, +} = require('internal/util'); +const { ERR_PROXY_TUNNEL } = require('internal/errors').codes; +assertCrypto(); + +const tls = require('tls'); +const { + kProxyConfig, + checkShouldUseProxy, + filterEnvForProxies, + kWaitForProxyTunnel, +} = require('internal/http'); +const { Agent: HttpAgent } = require('_http_agent'); +const { + httpServerPreClose, + Server: HttpServer, + setupConnectionsTracking, + storeHTTPOptions, + _connectionListener, +} = require('_http_server'); +const { ClientRequest } = require('_http_client'); 
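The `Server` constructor defined a few lines below is what the public `https.createServer()` returns; a minimal usage sketch (the key and certificate paths are hypothetical):

```js
const https = require('node:https');
const fs = require('node:fs');

// Hypothetical PEM files. ALPNProtocols defaults to ['http/1.1'] unless
// ALPNProtocols or ALPNCallback is supplied explicitly.
const server = https.createServer(
  {
    key: fs.readFileSync('/etc/ssl/private/server-key.pem'),
    cert: fs.readFileSync('/etc/ssl/certs/server-cert.pem'),
  },
  (req, res) => {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('hello over TLS\n');
  }
);

server.listen(8443, () => console.log('listening on https://localhost:8443'));
```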
+let debug = require('internal/util/debuglog').debuglog('https', (fn) => { + debug = fn; +}); +const net = require('net'); +const { URL, urlToHttpOptions, isURL } = require('internal/url'); +const { validateObject } = require('internal/validators'); +const { isIP } = require('internal/net'); +const assert = require('internal/assert'); +const { getOptionValue } = require('internal/options'); + +function Server(opts, requestListener) { + if (!(this instanceof Server)) return new Server(opts, requestListener); + + let ALPNProtocols = ['http/1.1']; + if (typeof opts === 'function') { + requestListener = opts; + opts = kEmptyObject; + } else if (opts == null) { + opts = kEmptyObject; + } else { + validateObject(opts, 'options'); + // Only one of ALPNProtocols and ALPNCallback can be set, so make sure we + // only set a default ALPNProtocols if the caller has not set either of them + if (opts.ALPNProtocols || opts.ALPNCallback) + ALPNProtocols = undefined; + } + + FunctionPrototypeCall(storeHTTPOptions, this, opts); + FunctionPrototypeCall(tls.Server, this, + { + noDelay: true, + ALPNProtocols, + ...opts, + }, + _connectionListener); + + this.httpAllowHalfOpen = false; + + if (requestListener) { + this.addListener('request', requestListener); + } + + this.addListener('tlsClientError', function addListener(err, conn) { + if (!this.emit('clientError', err, conn)) + conn.destroy(err); + }); + + this.timeout = 0; + this.maxHeadersCount = null; + this.on('listening', setupConnectionsTracking); +} + +ObjectSetPrototypeOf(Server.prototype, tls.Server.prototype); +ObjectSetPrototypeOf(Server, tls.Server); + +Server.prototype.closeAllConnections = HttpServer.prototype.closeAllConnections; + +Server.prototype.closeIdleConnections = HttpServer.prototype.closeIdleConnections; + +Server.prototype.setTimeout = HttpServer.prototype.setTimeout; + +Server.prototype.close = function close() { + httpServerPreClose(this); + ReflectApply(tls.Server.prototype.close, this, arguments); + return this; +}; + +Server.prototype[SymbolAsyncDispose] = async function() { + await FunctionPrototypeCall(promisify(this.close), this); +}; + +/** + * Creates a new `https.Server` instance. + * @param {{ + * IncomingMessage?: IncomingMessage; + * ServerResponse?: ServerResponse; + * insecureHTTPParser?: boolean; + * maxHeaderSize?: number; + * }} [opts] + * @param {Function} [requestListener] + * @returns {Server} + */ +function createServer(opts, requestListener) { + return new Server(opts, requestListener); +} + +// When proxying a HTTPS request, the following needs to be done: +// https://datatracker.ietf.org/doc/html/rfc9110#CONNECT +// 1. Send a CONNECT request to the proxy server. +// 2. Wait for 200 connection established response to establish the tunnel. +// 3. Perform TLS handshake with the endpoint over the socket. +// 4. Tunnel the request using the established connection. +// +// This function computes the tunnel configuration for HTTPS requests. +// The handling of the tunnel connection is done in createConnection. 
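The same four steps can be exercised by hand with the public `net` and `tls` modules, which is roughly what the helpers below automate. A hedged sketch follows; the proxy address and target host are hypothetical, and the response parsing is deliberately minimal:

```js
const net = require('node:net');
const tls = require('node:tls');

const proxy = { host: '127.0.0.1', port: 8080 };   // hypothetical forward proxy
const target = { host: 'example.com', port: 443 }; // hypothetical endpoint

// 1. Open a TCP connection to the proxy and send CONNECT.
const socket = net.connect(proxy, () => {
  socket.write(`CONNECT ${target.host}:${target.port} HTTP/1.1\r\n` +
               `Host: ${target.host}:${target.port}\r\n\r\n`);
});
socket.on('error', (err) => console.error('tunnel error:', err));

let buffered = '';
socket.on('data', (chunk) => {
  buffered += chunk;
  if (!buffered.includes('\r\n\r\n')) return; // 2. wait for the full response header

  if (!buffered.startsWith('HTTP/1.1 200')) {
    socket.destroy(new Error(`tunnel rejected: ${buffered.split('\r\n')[0]}`));
    return;
  }
  socket.removeAllListeners('data');

  // 3. TLS handshake with the endpoint over the tunneled socket.
  const secure = tls.connect({ socket, servername: target.host }, () => {
    // 4. Send the actual request through the established tunnel.
    secure.write(`GET / HTTP/1.1\r\nHost: ${target.host}\r\nConnection: close\r\n\r\n`);
  });
  secure.on('data', (d) => process.stdout.write(d));
});
```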
+function getTunnelConfigForProxiedHttps(agent, reqOptions) { + if (!agent[kProxyConfig]) { + return null; + } + if ((reqOptions.protocol || agent.protocol) !== 'https:') { + return null; + } + const shouldUseProxy = checkShouldUseProxy(agent[kProxyConfig], reqOptions); + debug(`getTunnelConfigForProxiedHttps should use proxy for ${reqOptions.host}:${reqOptions.port}:`, shouldUseProxy); + if (!shouldUseProxy) { + return null; + } + const { auth, href } = agent[kProxyConfig]; + // The request is a HTTPS request, assemble the payload for establishing the tunnel. + const ipType = isIP(reqOptions.host); + // The request target must put IPv6 address in square brackets. + // Here reqOptions is already processed by urlToHttpOptions so we'll add them back if necessary. + // See https://www.rfc-editor.org/rfc/rfc3986#section-3.2.2 + const requestHost = ipType === 6 ? `[${reqOptions.host}]` : reqOptions.host; + const requestPort = reqOptions.port || agent.defaultPort; + const endpoint = `${requestHost}:${requestPort}`; + // The ClientRequest constructor should already have validated the host and the port. + // When the request options come from a string invalid characters would be stripped away, + // when it's an object ERR_INVALID_CHAR would be thrown. Here we just assert in case + // agent.createConnection() is called with invalid options. + assert(!endpoint.includes('\r')); + assert(!endpoint.includes('\n')); + + let payload = `CONNECT ${endpoint} HTTP/1.1\r\n`; + // The parseProxyConfigFromEnv() method should have already validated the authorization header + // value. + if (auth) { + payload += `proxy-authorization: ${auth}\r\n`; + } + if (agent.keepAlive || agent.maxSockets !== Infinity) { + payload += 'proxy-connection: keep-alive\r\n'; + } + payload += `host: ${endpoint}`; + payload += '\r\n\r\n'; + + const result = { + __proto__: null, + proxyTunnelPayload: payload, + requestOptions: { // Options used for the request sent after the tunnel is established. + __proto__: null, + servername: reqOptions.servername || ipType ? undefined : reqOptions.host, + ...reqOptions, + }, + }; + debug(`updated request for HTTPS proxy ${href} with`, result); + return result; +}; + +function establishTunnel(agent, socket, options, tunnelConfig, afterSocket) { + const { proxyTunnelPayload } = tunnelConfig; + // By default, the socket is in paused mode. Read to look for the 200 + // connection established response. + function read() { + let chunk; + while ((chunk = socket.read()) !== null) { + if (onProxyData(chunk) !== -1) { + break; + } + } + socket.on('readable', read); + } + + function cleanup() { + socket.removeListener('end', onProxyEnd); + socket.removeListener('error', onProxyError); + socket.removeListener('readable', read); + socket.setTimeout(0); // Clear the timeout for the tunnel establishment. + } + + function onProxyError(err) { + debug('onProxyError', err); + cleanup(); + afterSocket(err, socket); + } + + // Read the headers from the chunks and check for the status code. If it fails we + // clean up the socket and return an error. Otherwise we establish the tunnel. 
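The `onProxyData()` defined next returns the index of the header terminator, or `-1` while more data is still needed, which is what the `read()` loop above keys off. A stripped-down sketch of that contract (the helper name is hypothetical):

```js
// Mirrors the onProxyData() contract: -1 means "keep reading"; any other
// value means the full response header arrived and the status line is known.
function scanConnectResponse(buffered) {
  const headerEnd = buffered.indexOf('\r\n\r\n');
  if (headerEnd === -1) return { headerEnd };               // incomplete, read more
  const statusLine = buffered.slice(0, buffered.indexOf('\r\n'));
  return { headerEnd, ok: statusLine.split(' ')[1] === '200', statusLine };
}

console.log(scanConnectResponse('HTTP/1.1 200 Connection established\r\n\r\n'));
// { headerEnd: 35, ok: true, statusLine: 'HTTP/1.1 200 Connection established' }
```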
+ let buffer = ''; + function onProxyData(chunk) { + const str = chunk.toString(); + debug('onProxyData', str); + buffer += str; + const headerEndIndex = buffer.indexOf('\r\n\r\n'); + if (headerEndIndex === -1) return headerEndIndex; + const statusLine = buffer.substring(0, buffer.indexOf('\r\n')); + const statusCode = statusLine.split(' ')[1]; + if (statusCode !== '200') { + debug(`onProxyData receives ${statusCode}, cleaning up`); + cleanup(); + const targetHost = proxyTunnelPayload.split('\r')[0].split(' ')[1]; + const message = `Failed to establish tunnel to ${targetHost} via ${agent[kProxyConfig].href}: ${statusLine}`; + const err = new ERR_PROXY_TUNNEL(message); + err.statusCode = NumberParseInt(statusCode); + afterSocket(err, socket); + } else { + // https://datatracker.ietf.org/doc/html/rfc9110#CONNECT + // RFC 9110 says that it can be 2xx but in the real world, proxy clients generally only + // accepts 200. + // Proxy servers are not supposed to send anything after the headers - the payload must be + // be empty. So after this point we will proceed with the tunnel e.g. starting TLS handshake. + debug('onProxyData receives 200, establishing tunnel'); + cleanup(); + + // Reuse the tunneled socket to perform the TLS handshake with the endpoint, + // then send the request. + const { requestOptions } = tunnelConfig; + tunnelConfig.requestOptions = null; + requestOptions.socket = socket; + let tunneldSocket; + const onTLSHandshakeError = (err) => { + debug('Propagate error event from tunneled socket to tunnel socket'); + afterSocket(err, tunneldSocket); + }; + tunneldSocket = tls.connect(requestOptions, () => { + debug('TLS handshake over tunnel succeeded'); + tunneldSocket.removeListener('error', onTLSHandshakeError); + afterSocket(null, tunneldSocket); + }); + tunneldSocket.on('free', () => { + debug('Propagate free event from tunneled socket to tunnel socket'); + socket.emit('free'); + }); + tunneldSocket.on('error', onTLSHandshakeError); + } + return headerEndIndex; + } + + function onProxyEnd() { + cleanup(); + const err = new ERR_PROXY_TUNNEL('Connection to establish proxy tunnel ended unexpectedly'); + afterSocket(err, socket); + } + + const proxyTunnelTimeout = tunnelConfig.requestOptions.timeout; + debug('proxyTunnelTimeout', proxyTunnelTimeout, options.timeout); + // It may be worth a separate timeout error/event. + // But it also makes sense to treat the tunnel establishment timeout as + // a normal timeout for the request. + function onProxyTimeout() { + debug('onProxyTimeout', proxyTunnelTimeout); + cleanup(); + const err = new ERR_PROXY_TUNNEL(`Connection to establish proxy tunnel timed out after ${proxyTunnelTimeout}ms`); + err.proxyTunnelTimeout = proxyTunnelTimeout; + afterSocket(err, socket); + } + + if (proxyTunnelTimeout && proxyTunnelTimeout > 0) { + debug('proxy tunnel setTimeout', proxyTunnelTimeout); + socket.setTimeout(proxyTunnelTimeout, onProxyTimeout); + } + + socket.on('error', onProxyError); + socket.on('end', onProxyEnd); + socket.write(proxyTunnelPayload); + + read(); +} + +// HTTPS agents. +// See ProxyConfig in internal/http.js for how the connection should be handled +// when the agent is configured to use a proxy server. +function createConnection(...args) { + // XXX: This signature (port, host, options) is different from all the other + // createConnection() methods. 
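That signature quirk is also why the normalization below accepts `(options)`, `(port[, host][, options])`, or a trailing callback. Subclasses that override `createConnection()` only need to honour the `(options[, callback])` form that `http.Agent` documents; a hedged sketch of such an override (the `family: 4` tweak is purely illustrative):

```js
const https = require('node:https');

// Illustrative subclass: force IPv4 lookups, then delegate to the stock
// https.Agent connection logic (TLS, session cache, proxy handling).
class IPv4Agent extends https.Agent {
  createConnection(options, callback) {
    return super.createConnection({ ...options, family: 4 }, callback);
  }
}

const agent = new IPv4Agent({ keepAlive: true });
https.get('https://example.com/', { agent }, (res) => {
  console.log(res.statusCode);
  res.resume();
});
```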
+ let options, cb; + if (args[0] !== null && typeof args[0] === 'object') { + options = args[0]; + } else if (args[1] !== null && typeof args[1] === 'object') { + options = { ...args[1] }; + } else if (args[2] === null || typeof args[2] !== 'object') { + options = {}; + } else { + options = { ...args[2] }; + } + if (typeof args[0] === 'number') { + options.port = args[0]; + } + if (typeof args[1] === 'string') { + options.host = args[1]; + } + if (typeof args[args.length - 1] === 'function') { + cb = args[args.length - 1]; + } + + debug('createConnection', options); + + if (options._agentKey) { + const session = this._getSession(options._agentKey); + if (session) { + debug('reuse session for %j', options._agentKey); + options = { + session, + ...options, + }; + } + } + + let socket; + const tunnelConfig = getTunnelConfigForProxiedHttps(this, options); + debug(`https createConnection should use proxy for ${options.host}:${options.port}:`, tunnelConfig); + + if (!tunnelConfig) { + socket = tls.connect(options); + } else { + const connectOptions = { + ...this[kProxyConfig].proxyConnectionOptions, + }; + debug('Create proxy socket', connectOptions); + const onError = (err) => { + cleanupAndPropagate(err, socket); + }; + const proxyTunnelTimeout = tunnelConfig.requestOptions.timeout; + const onTimeout = () => { + const err = new ERR_PROXY_TUNNEL(`Connection to establish proxy tunnel timed out after ${proxyTunnelTimeout}ms`); + err.proxyTunnelTimeout = proxyTunnelTimeout; + cleanupAndPropagate(err, socket); + }; + const cleanupAndPropagate = once((err, currentSocket) => { + debug('cleanupAndPropagate', err); + socket.removeListener('error', onError); + socket.removeListener('timeout', onTimeout); + // An error occurred during tunnel establishment, in that case just destroy the socket. + // and propagate the error to the callback. + + // When the error comes from unexpected status code, the stream is still in good shape, + // in that case let req.onSocket handle the destruction instead. + if (err && err.code === 'ERR_PROXY_TUNNEL' && !err.statusCode) { + socket.destroy(); + } + // This error should go to: + // -> oncreate in Agent.prototype.createSocket + // -> closure in Agent.prototype.addRequest or Agent.prototype.removeSocket + if (cb) { + cb(err, currentSocket); + } + }); + const onProxyConnection = () => { + socket.removeListener('error', onError); + establishTunnel(this, socket, options, tunnelConfig, cleanupAndPropagate); + }; + if (this[kProxyConfig].protocol === 'http:') { + socket = net.connect(connectOptions, onProxyConnection); + } else { + socket = tls.connect(connectOptions, onProxyConnection); + } + + socket.on('error', onError); + if (proxyTunnelTimeout) { + socket.setTimeout(proxyTunnelTimeout, onTimeout); + } + socket[kWaitForProxyTunnel] = true; + } + + if (options._agentKey) { + // Cache new session for reuse + socket.on('session', (session) => { + this._cacheSession(options._agentKey, session); + }); + + // Evict session on error + socket.once('close', (err) => { + if (err) + this._evictSession(options._agentKey); + }); + } + + return socket; +} + +/** + * Creates a new `HttpAgent` instance. 
+ * @param {{ + * keepAlive?: boolean; + * keepAliveMsecs?: number; + * maxSockets?: number; + * maxTotalSockets?: number; + * maxFreeSockets?: number; + * scheduling?: string; + * timeout?: number; + * maxCachedSessions?: number; + * servername?: string; + * defaultPort?: number; + * protocol?: string; + * proxyEnv?: object; + * }} [options] + * @class + */ +function Agent(options) { + if (!(this instanceof Agent)) + return new Agent(options); + + options = { __proto__: null, ...options }; + options.defaultPort ??= 443; + options.protocol ??= 'https:'; + FunctionPrototypeCall(HttpAgent, this, options); + + this.maxCachedSessions = this.options.maxCachedSessions; + if (this.maxCachedSessions === undefined) + this.maxCachedSessions = 100; + + this._sessionCache = { + map: {}, + list: [], + }; +} +ObjectSetPrototypeOf(Agent.prototype, HttpAgent.prototype); +ObjectSetPrototypeOf(Agent, HttpAgent); +Agent.prototype.createConnection = createConnection; + +/** + * Gets a unique name for a set of options. + * @param {{ + * host: string; + * port: number; + * localAddress: string; + * family: number; + * }} [options] + * @returns {string} + */ +Agent.prototype.getName = function getName(options = kEmptyObject) { + let name = FunctionPrototypeCall(HttpAgent.prototype.getName, this, options); + + name += ':'; + if (options.ca) + name += options.ca; + + name += ':'; + if (options.cert) + name += options.cert; + + name += ':'; + if (options.clientCertEngine) + name += options.clientCertEngine; + + name += ':'; + if (options.ciphers) + name += options.ciphers; + + name += ':'; + if (options.key) + name += options.key; + + name += ':'; + if (options.pfx) + name += options.pfx; + + name += ':'; + if (options.rejectUnauthorized !== undefined) + name += options.rejectUnauthorized; + + name += ':'; + if (options.servername && options.servername !== options.host) + name += options.servername; + + name += ':'; + if (options.minVersion) + name += options.minVersion; + + name += ':'; + if (options.maxVersion) + name += options.maxVersion; + + name += ':'; + if (options.secureProtocol) + name += options.secureProtocol; + + name += ':'; + if (options.crl) + name += options.crl; + + name += ':'; + if (options.honorCipherOrder !== undefined) + name += options.honorCipherOrder; + + name += ':'; + if (options.ecdhCurve) + name += options.ecdhCurve; + + name += ':'; + if (options.dhparam) + name += options.dhparam; + + name += ':'; + if (options.secureOptions !== undefined) + name += options.secureOptions; + + name += ':'; + if (options.sessionIdContext) + name += options.sessionIdContext; + + name += ':'; + if (options.sigalgs) + name += JSONStringify(options.sigalgs); + + name += ':'; + if (options.privateKeyIdentifier) + name += options.privateKeyIdentifier; + + name += ':'; + if (options.privateKeyEngine) + name += options.privateKeyEngine; + + return name; +}; + +Agent.prototype._getSession = function _getSession(key) { + return this._sessionCache.map[key]; +}; + +Agent.prototype._cacheSession = function _cacheSession(key, session) { + // Cache is disabled + if (this.maxCachedSessions === 0) + return; + + // Fast case - update existing entry + if (this._sessionCache.map[key]) { + this._sessionCache.map[key] = session; + return; + } + + // Put new entry + if (this._sessionCache.list.length >= this.maxCachedSessions) { + const oldKey = ArrayPrototypeShift(this._sessionCache.list); + debug('evicting %j', oldKey); + delete this._sessionCache.map[oldKey]; + } + + ArrayPrototypePush(this._sessionCache.list, key); + 
this._sessionCache.map[key] = session; +}; + +Agent.prototype._evictSession = function _evictSession(key) { + const index = ArrayPrototypeIndexOf(this._sessionCache.list, key); + if (index === -1) + return; + + ArrayPrototypeSplice(this._sessionCache.list, index, 1); + delete this._sessionCache.map[key]; +}; + +const globalAgent = new Agent({ + keepAlive: true, scheduling: 'lifo', timeout: 5000, + // This normalized from both --use-env-proxy and NODE_USE_ENV_PROXY settings. + proxyEnv: getOptionValue('--use-env-proxy') ? filterEnvForProxies(process.env) : undefined, +}); + +/** + * Makes a request to a secure web server. + * @param {...any} args + * @returns {ClientRequest} + */ +function request(...args) { + let options = {}; + + if (typeof args[0] === 'string') { + const urlStr = ArrayPrototypeShift(args); + options = urlToHttpOptions(new URL(urlStr)); + } else if (isURL(args[0])) { + options = urlToHttpOptions(ArrayPrototypeShift(args)); + } + + if (args[0] && typeof args[0] !== 'function') { + ObjectAssign(options, ArrayPrototypeShift(args)); + } + + options._defaultAgent = module.exports.globalAgent; + ArrayPrototypeUnshift(args, options); + + return ReflectConstruct(ClientRequest, args); +} + +/** + * Makes a GET request to a secure web server. + * @param {string | URL} input + * @param {{ + * agent?: Agent | boolean; + * auth?: string; + * createConnection?: Function; + * defaultPort?: number; + * family?: number; + * headers?: object; + * hints?: number; + * host?: string; + * hostname?: string; + * insecureHTTPParser?: boolean; + * joinDuplicateHeaders?: boolean; + * localAddress?: string; + * localPort?: number; + * lookup?: Function; + * maxHeaderSize?: number; + * method?: string; + * path?: string; + * port?: number; + * protocol?: string; + * setHost?: boolean; + * socketPath?: string; + * timeout?: number; + * signal?: AbortSignal; + * uniqueHeaders?: Array; + * } | string | URL} [options] + * @param {Function} [cb] + * @returns {ClientRequest} + */ +function get(input, options, cb) { + const req = request(input, options, cb); + req.end(); + return req; +} + +module.exports = { + Agent, + globalAgent, + Server, + createServer, + get, + request, +}; \ No newline at end of file diff --git a/.codesandbox/node/path.js b/.codesandbox/node/path.js new file mode 100644 index 0000000000..08ee62f347 --- /dev/null +++ b/.codesandbox/node/path.js @@ -0,0 +1,1790 @@ +"use strict"; + +const { + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypePush, + ArrayPrototypeSlice, + FunctionPrototypeBind, + StringPrototypeCharCodeAt, + StringPrototypeIncludes, + StringPrototypeIndexOf, + StringPrototypeLastIndexOf, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, +} = primordials; + +const { + CHAR_UPPERCASE_A, + CHAR_LOWERCASE_A, + CHAR_UPPERCASE_Z, + CHAR_LOWERCASE_Z, + CHAR_DOT, + CHAR_FORWARD_SLASH, + CHAR_BACKWARD_SLASH, + CHAR_COLON, + CHAR_QUESTION_MARK, +} = require("internal/constants"); +const { validateObject, validateString } = require("internal/validators"); + +const { isWindows, getLazy } = require("internal/util"); + +const lazyMatchGlobPattern = getLazy( + () => require("internal/fs/glob").matchGlobPattern +); + +function isPathSeparator(code) { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +} + +function isPosixPathSeparator(code) { + return code === CHAR_FORWARD_SLASH; +} + +const WINDOWS_RESERVED_NAMES = [ + "CON", + "PRN", + "AUX", + 
"NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", + "COM\xb9", + "COM\xb2", + "COM\xb3", + "LPT\xb9", + "LPT\xb2", + "LPT\xb3", +]; + +function isWindowsReservedName(path, colonIndex) { + const devicePart = StringPrototypeToUpperCase( + StringPrototypeSlice(path, 0, colonIndex) + ); + return ArrayPrototypeIncludes(WINDOWS_RESERVED_NAMES, devicePart); +} + +function isWindowsDeviceRoot(code) { + return ( + (code >= CHAR_UPPERCASE_A && code <= CHAR_UPPERCASE_Z) || + (code >= CHAR_LOWERCASE_A && code <= CHAR_LOWERCASE_Z) + ); +} + +// Resolves . and .. elements in a path with directory names +function normalizeString(path, allowAboveRoot, separator, isPathSeparator) { + let res = ""; + let lastSegmentLength = 0; + let lastSlash = -1; + let dots = 0; + let code = 0; + for (let i = 0; i <= path.length; ++i) { + if (i < path.length) code = StringPrototypeCharCodeAt(path, i); + else if (isPathSeparator(code)) break; + else code = CHAR_FORWARD_SLASH; + + if (isPathSeparator(code)) { + if (lastSlash === i - 1 || dots === 1) { + // NOOP + } else if (dots === 2) { + if ( + res.length < 2 || + lastSegmentLength !== 2 || + StringPrototypeCharCodeAt(res, res.length - 1) !== CHAR_DOT || + StringPrototypeCharCodeAt(res, res.length - 2) !== CHAR_DOT + ) { + if (res.length > 2) { + const lastSlashIndex = res.length - lastSegmentLength - 1; + if (lastSlashIndex === -1) { + res = ""; + lastSegmentLength = 0; + } else { + res = StringPrototypeSlice(res, 0, lastSlashIndex); + lastSegmentLength = + res.length - 1 - StringPrototypeLastIndexOf(res, separator); + } + lastSlash = i; + dots = 0; + continue; + } else if (res.length !== 0) { + res = ""; + lastSegmentLength = 0; + lastSlash = i; + dots = 0; + continue; + } + } + if (allowAboveRoot) { + res += res.length > 0 ? `${separator}..` : ".."; + lastSegmentLength = 2; + } + } else { + if (res.length > 0) + res += `${separator}${StringPrototypeSlice(path, lastSlash + 1, i)}`; + else res = StringPrototypeSlice(path, lastSlash + 1, i); + lastSegmentLength = i - lastSlash - 1; + } + lastSlash = i; + dots = 0; + } else if (code === CHAR_DOT && dots !== -1) { + ++dots; + } else { + dots = -1; + } + } + return res; +} + +function formatExt(ext) { + return ext ? `${ext[0] === "." ? "" : "."}${ext}` : ""; +} + +/** + * @param {string} sep + * @param {{ + * dir?: string; + * root?: string; + * base?: string; + * name?: string; + * ext?: string; + * }} pathObject + * @returns {string} + */ +function _format(sep, pathObject) { + validateObject(pathObject, "pathObject"); + const dir = pathObject.dir || pathObject.root; + const base = + pathObject.base || `${pathObject.name || ""}${formatExt(pathObject.ext)}`; + if (!dir) { + return base; + } + return dir === pathObject.root ? 
`${dir}${base}` : `${dir}${sep}${base}`; +} + +const forwardSlashRegExp = /\//g; + +const win32 = { + /** + * path.resolve([from ...], to) + * @param {...string} args + * @returns {string} + */ + resolve(...args) { + let resolvedDevice = ""; + let resolvedTail = ""; + let resolvedAbsolute = false; + + for (let i = args.length - 1; i >= -1; i--) { + let path; + if (i >= 0) { + path = args[i]; + validateString(path, `paths[${i}]`); + + // Skip empty entries + if (path.length === 0) { + continue; + } + } else if (resolvedDevice.length === 0) { + path = process.cwd(); + // Fast path for current directory + if ( + args.length === 0 || + (args.length === 1 && + (args[0] === "" || args[0] === ".") && + isPathSeparator(StringPrototypeCharCodeAt(path, 0))) + ) { + if (!isWindows) { + path = StringPrototypeReplace(path, forwardSlashRegExp, "\\"); + } + return path; + } + } else { + // Windows has the concept of drive-specific current working + // directories. If we've resolved a drive letter but not yet an + // absolute path, get cwd for that drive, or the process cwd if + // the drive cwd is not available. We're sure the device is not + // a UNC path at this points, because UNC paths are always absolute. + path = process.env[`=${resolvedDevice}`] || process.cwd(); + + // Verify that a cwd was found and that it actually points + // to our drive. If not, default to the drive's root. + if ( + path === undefined || + (StringPrototypeToLowerCase(StringPrototypeSlice(path, 0, 2)) !== + StringPrototypeToLowerCase(resolvedDevice) && + StringPrototypeCharCodeAt(path, 2) === CHAR_BACKWARD_SLASH) + ) { + path = `${resolvedDevice}\\`; + } + } + + const len = path.length; + let rootEnd = 0; + let device = ""; + let isAbsolute = false; + const code = StringPrototypeCharCodeAt(path, 0); + + // Try to match a root + if (len === 1) { + if (isPathSeparator(code)) { + // `path` contains just a path separator + rootEnd = 1; + isAbsolute = true; + } + } else if (isPathSeparator(code)) { + // Possible UNC root + + // If we started with a separator, we know we at least have an + // absolute path of some kind (UNC or otherwise) + isAbsolute = true; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + const firstPart = StringPrototypeSlice(path, last, j); + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len || j !== last) { + if (firstPart !== "." && firstPart !== "?") { + // We matched a UNC root + device = `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last, + j + )}`; + rootEnd = j; + } else { + // We matched a device root (e.g. 
\\\\.\\PHYSICALDRIVE0) + device = `\\\\${firstPart}`; + rootEnd = 4; + } + } + } + } + } else { + rootEnd = 1; + } + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + // Possible device root + device = StringPrototypeSlice(path, 0, 2); + rootEnd = 2; + if (len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + // Treat separator following drive name as an absolute path + // indicator + isAbsolute = true; + rootEnd = 3; + } + } + + if (device.length > 0) { + if (resolvedDevice.length > 0) { + if ( + StringPrototypeToLowerCase(device) !== + StringPrototypeToLowerCase(resolvedDevice) + ) + // This path points to another device so it is not applicable + continue; + } else { + resolvedDevice = device; + } + } + + if (resolvedAbsolute) { + if (resolvedDevice.length > 0) break; + } else { + resolvedTail = `${StringPrototypeSlice( + path, + rootEnd + )}\\${resolvedTail}`; + resolvedAbsolute = isAbsolute; + if (isAbsolute && resolvedDevice.length > 0) { + break; + } + } + } + + // At this point the path should be resolved to a full absolute path, + // but handle relative paths to be safe (might happen when process.cwd() + // fails) + + // Normalize the tail path + resolvedTail = normalizeString( + resolvedTail, + !resolvedAbsolute, + "\\", + isPathSeparator + ); + + return resolvedAbsolute + ? `${resolvedDevice}\\${resolvedTail}` + : `${resolvedDevice}${resolvedTail}` || "."; + }, + + /** + * @param {string} path + * @returns {string} + */ + normalize(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return "."; + let rootEnd = 0; + let device; + let isAbsolute = false; + const code = StringPrototypeCharCodeAt(path, 0); + + // Try to match a root + if (len === 1) { + // `path` contains just a single char, exit early to avoid + // unnecessary work + return isPosixPathSeparator(code) ? "\\" : path; + } + if (isPathSeparator(code)) { + // Possible UNC root + + // If we started with a separator, we know we at least have an absolute + // path of some kind (UNC or otherwise) + isAbsolute = true; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + const firstPart = StringPrototypeSlice(path, last, j); + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len || j !== last) { + if (firstPart === "." || firstPart === "?") { + // We matched a device root (e.g. 
\\\\.\\PHYSICALDRIVE0) + device = `\\\\${firstPart}`; + rootEnd = 4; + const colonIndex = StringPrototypeIndexOf(path, ":"); + // Special case: handle \\?\COM1: or similar reserved device paths + const possibleDevice = StringPrototypeSlice( + path, + 4, + colonIndex + 1 + ); + if ( + isWindowsReservedName( + possibleDevice, + possibleDevice.length - 1 + ) + ) { + device = `\\\\?\\${possibleDevice}`; + rootEnd = 4 + possibleDevice.length; + } + } else if (j === len) { + // We matched a UNC root only + // Return the normalized version of the UNC root since there + // is nothing left to process + return `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last + )}\\`; + } else { + // We matched a UNC root with leftovers + device = `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last, + j + )}`; + rootEnd = j; + } + } + } + } + } else { + rootEnd = 1; + } + } else { + const colonIndex = StringPrototypeIndexOf(path, ":"); + if (colonIndex > 0) { + if (isWindowsDeviceRoot(code) && colonIndex === 1) { + device = StringPrototypeSlice(path, 0, 2); + rootEnd = 2; + if (len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + isAbsolute = true; + rootEnd = 3; + } + } else if (isWindowsReservedName(path, colonIndex)) { + device = StringPrototypeSlice(path, 0, colonIndex + 1); + rootEnd = colonIndex + 1; + } + } + } + + let tail = + rootEnd < len + ? normalizeString( + StringPrototypeSlice(path, rootEnd), + !isAbsolute, + "\\", + isPathSeparator + ) + : ""; + if (tail.length === 0 && !isAbsolute) tail = "."; + if ( + tail.length > 0 && + isPathSeparator(StringPrototypeCharCodeAt(path, len - 1)) + ) + tail += "\\"; + if ( + !isAbsolute && + device === undefined && + StringPrototypeIncludes(path, ":") + ) { + // If the original path was not absolute and if we have not been able to + // resolve it relative to a particular device, we need to ensure that the + // `tail` has not become something that Windows might interpret as an + // absolute path. See CVE-2024-36139. + if ( + tail.length >= 2 && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(tail, 0)) && + StringPrototypeCharCodeAt(tail, 1) === CHAR_COLON + ) { + return `.\\${tail}`; + } + let index = StringPrototypeIndexOf(path, ":"); + + do { + if ( + index === len - 1 || + isPathSeparator(StringPrototypeCharCodeAt(path, index + 1)) + ) { + return `.\\${tail}`; + } + } while ((index = StringPrototypeIndexOf(path, ":", index + 1)) !== -1); + } + const colonIndex = StringPrototypeIndexOf(path, ":"); + if (isWindowsReservedName(path, colonIndex)) { + return `.\\${device ?? ""}${tail}`; + } + if (device === undefined) { + return isAbsolute ? `\\${tail}` : tail; + } + return isAbsolute ? 
`${device}\\${tail}` : `${device}${tail}`; + }, + + /** + * @param {string} path + * @returns {boolean} + */ + isAbsolute(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return false; + + const code = StringPrototypeCharCodeAt(path, 0); + return ( + isPathSeparator(code) || + // Possible device root + (len > 2 && + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON && + isPathSeparator(StringPrototypeCharCodeAt(path, 2))) + ); + }, + + /** + * @param {...string} args + * @returns {string} + */ + join(...args) { + if (args.length === 0) return "."; + + const path = []; + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + validateString(arg, "path"); + if (arg.length > 0) { + ArrayPrototypePush(path, arg); + } + } + + if (path.length === 0) return "."; + + const firstPart = path[0]; + let joined = ArrayPrototypeJoin(path, "\\"); + + // Make sure that the joined path doesn't start with two slashes, because + // normalize() will mistake it for a UNC path then. + // + // This step is skipped when it is very clear that the user actually + // intended to point at a UNC path. This is assumed when the first + // non-empty string arguments starts with exactly two slashes followed by + // at least one more non-slash character. + // + // Note that for normalize() to treat a path as a UNC path it needs to + // have at least 2 components, so we don't filter for that here. + // This means that the user can use join to construct UNC paths from + // a server name and a share name; for example: + // path.join('//server', 'share') -> '\\\\server\\share\\') + let needsReplace = true; + let slashCount = 0; + if (isPathSeparator(StringPrototypeCharCodeAt(firstPart, 0))) { + ++slashCount; + const firstLen = firstPart.length; + if ( + firstLen > 1 && + isPathSeparator(StringPrototypeCharCodeAt(firstPart, 1)) + ) { + ++slashCount; + if (firstLen > 2) { + if (isPathSeparator(StringPrototypeCharCodeAt(firstPart, 2))) + ++slashCount; + else { + // We matched a UNC path in the first part + needsReplace = false; + } + } + } + } + if (needsReplace) { + // Find any more consecutive slashes we need to replace + while ( + slashCount < joined.length && + isPathSeparator(StringPrototypeCharCodeAt(joined, slashCount)) + ) { + slashCount++; + } + + // Replace the slashes if needed + if (slashCount >= 2) + joined = `\\${StringPrototypeSlice(joined, slashCount)}`; + } + + // Skip normalization when reserved device names are present + const parts = []; + let part = ""; + + for (let i = 0; i < joined.length; i++) { + if (joined[i] === "\\") { + if (part) parts.push(part); + part = ""; + // Skip consecutive backslashes + while (i + 1 < joined.length && joined[i + 1] === "\\") i++; + } else { + part += joined[i]; + } + } + // Add the final part if any + if (part) parts.push(part); + + // Check if any part has a Windows reserved name + if ( + parts.some((p) => { + const colonIndex = StringPrototypeIndexOf(p, ":"); + return colonIndex !== -1 && isWindowsReservedName(p, colonIndex); + }) + ) { + // Replace forward slashes with backslashes + let result = ""; + for (let i = 0; i < joined.length; i++) { + result += joined[i] === "/" ? 
"\\" : joined[i]; + } + return result; + } + + return win32.normalize(joined); + }, + + /** + * It will solve the relative path from `from` to `to`, for instance + * from = 'C:\\orandea\\test\\aaa' + * to = 'C:\\orandea\\impl\\bbb' + * The output of the function should be: '..\\..\\impl\\bbb' + * @param {string} from + * @param {string} to + * @returns {string} + */ + relative(from, to) { + validateString(from, "from"); + validateString(to, "to"); + + if (from === to) return ""; + + const fromOrig = win32.resolve(from); + const toOrig = win32.resolve(to); + + if (fromOrig === toOrig) return ""; + + from = StringPrototypeToLowerCase(fromOrig); + to = StringPrototypeToLowerCase(toOrig); + + if (from === to) return ""; + + if (fromOrig.length !== from.length || toOrig.length !== to.length) { + const fromSplit = StringPrototypeSplit(fromOrig, "\\"); + const toSplit = StringPrototypeSplit(toOrig, "\\"); + if (fromSplit[fromSplit.length - 1] === "") { + fromSplit.pop(); + } + if (toSplit[toSplit.length - 1] === "") { + toSplit.pop(); + } + + const fromLen = fromSplit.length; + const toLen = toSplit.length; + const length = fromLen < toLen ? fromLen : toLen; + + let i; + for (i = 0; i < length; i++) { + if ( + StringPrototypeToLowerCase(fromSplit[i]) !== + StringPrototypeToLowerCase(toSplit[i]) + ) { + break; + } + } + + if (i === 0) { + return toOrig; + } else if (i === length) { + if (toLen > length) { + return ArrayPrototypeJoin(ArrayPrototypeSlice(toSplit, i), "\\"); + } + if (fromLen > length) { + return StringPrototypeRepeat("..\\", fromLen - 1 - i) + ".."; + } + return ""; + } + + return ( + StringPrototypeRepeat("..\\", fromLen - i) + + ArrayPrototypeJoin(ArrayPrototypeSlice(toSplit, i), "\\") + ); + } + + // Trim any leading backslashes + let fromStart = 0; + while ( + fromStart < from.length && + StringPrototypeCharCodeAt(from, fromStart) === CHAR_BACKWARD_SLASH + ) { + fromStart++; + } + // Trim trailing backslashes (applicable to UNC paths only) + let fromEnd = from.length; + while ( + fromEnd - 1 > fromStart && + StringPrototypeCharCodeAt(from, fromEnd - 1) === CHAR_BACKWARD_SLASH + ) { + fromEnd--; + } + const fromLen = fromEnd - fromStart; + + // Trim any leading backslashes + let toStart = 0; + while ( + toStart < to.length && + StringPrototypeCharCodeAt(to, toStart) === CHAR_BACKWARD_SLASH + ) { + toStart++; + } + // Trim trailing backslashes (applicable to UNC paths only) + let toEnd = to.length; + while ( + toEnd - 1 > toStart && + StringPrototypeCharCodeAt(to, toEnd - 1) === CHAR_BACKWARD_SLASH + ) { + toEnd--; + } + const toLen = toEnd - toStart; + + // Compare paths to find the longest common path from root + const length = fromLen < toLen ? fromLen : toLen; + let lastCommonSep = -1; + let i = 0; + for (; i < length; i++) { + const fromCode = StringPrototypeCharCodeAt(from, fromStart + i); + if (fromCode !== StringPrototypeCharCodeAt(to, toStart + i)) break; + else if (fromCode === CHAR_BACKWARD_SLASH) lastCommonSep = i; + } + + // We found a mismatch before the first common path separator was seen, so + // return the original `to`. + if (i !== length) { + if (lastCommonSep === -1) return toOrig; + } else { + if (toLen > length) { + if ( + StringPrototypeCharCodeAt(to, toStart + i) === CHAR_BACKWARD_SLASH + ) { + // We get here if `from` is the exact base path for `to`. + // For example: from='C:\\foo\\bar'; to='C:\\foo\\bar\\baz' + return StringPrototypeSlice(toOrig, toStart + i + 1); + } + if (i === 2) { + // We get here if `from` is the device root. 
+ // For example: from='C:\\'; to='C:\\foo' + return StringPrototypeSlice(toOrig, toStart + i); + } + } + if (fromLen > length) { + if ( + StringPrototypeCharCodeAt(from, fromStart + i) === CHAR_BACKWARD_SLASH + ) { + // We get here if `to` is the exact base path for `from`. + // For example: from='C:\\foo\\bar'; to='C:\\foo' + lastCommonSep = i; + } else if (i === 2) { + // We get here if `to` is the device root. + // For example: from='C:\\foo\\bar'; to='C:\\' + lastCommonSep = 3; + } + } + if (lastCommonSep === -1) lastCommonSep = 0; + } + + let out = ""; + // Generate the relative path based on the path difference between `to` and + // `from` + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if ( + i === fromEnd || + StringPrototypeCharCodeAt(from, i) === CHAR_BACKWARD_SLASH + ) { + out += out.length === 0 ? ".." : "\\.."; + } + } + + toStart += lastCommonSep; + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts + if (out.length > 0) + return `${out}${StringPrototypeSlice(toOrig, toStart, toEnd)}`; + + if (StringPrototypeCharCodeAt(toOrig, toStart) === CHAR_BACKWARD_SLASH) + ++toStart; + return StringPrototypeSlice(toOrig, toStart, toEnd); + }, + + /** + * @param {string} path + * @returns {string} + */ + toNamespacedPath(path) { + // Note: this will *probably* throw somewhere. + if (typeof path !== "string" || path.length === 0) return path; + + const resolvedPath = win32.resolve(path); + + if (resolvedPath.length <= 2) return path; + + if (StringPrototypeCharCodeAt(resolvedPath, 0) === CHAR_BACKWARD_SLASH) { + // Possible UNC root + if (StringPrototypeCharCodeAt(resolvedPath, 1) === CHAR_BACKWARD_SLASH) { + const code = StringPrototypeCharCodeAt(resolvedPath, 2); + if (code !== CHAR_QUESTION_MARK && code !== CHAR_DOT) { + // Matched non-long UNC root, convert the path to a long UNC path + return `\\\\?\\UNC\\${StringPrototypeSlice(resolvedPath, 2)}`; + } + } + } else if ( + isWindowsDeviceRoot(StringPrototypeCharCodeAt(resolvedPath, 0)) && + StringPrototypeCharCodeAt(resolvedPath, 1) === CHAR_COLON && + StringPrototypeCharCodeAt(resolvedPath, 2) === CHAR_BACKWARD_SLASH + ) { + // Matched device root, convert the path to a long UNC path + return `\\\\?\\${resolvedPath}`; + } + + return resolvedPath; + }, + + /** + * @param {string} path + * @returns {string} + */ + dirname(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return "."; + let rootEnd = -1; + let offset = 0; + const code = StringPrototypeCharCodeAt(path, 0); + + if (len === 1) { + // `path` contains just a path separator, exit early to avoid + // unnecessary work or a dot. + return isPathSeparator(code) ? path : "."; + } + + // Try to match a root + if (isPathSeparator(code)) { + // Possible UNC root + + rootEnd = offset = 1; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! 
+ last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len) { + // We matched a UNC root only + return path; + } + if (j !== last) { + // We matched a UNC root with leftovers + + // Offset by 1 to include the separator after the UNC root to + // treat it as a "normal root" on top of a (UNC) root + rootEnd = offset = j + 1; + } + } + } + } + // Possible device root + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + rootEnd = + len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2)) ? 3 : 2; + offset = rootEnd; + } + + let end = -1; + let matchedSlash = true; + for (let i = len - 1; i >= offset; --i) { + if (isPathSeparator(StringPrototypeCharCodeAt(path, i))) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) { + if (rootEnd === -1) return "."; + + end = rootEnd; + } + return StringPrototypeSlice(path, 0, end); + }, + + /** + * @param {string} path + * @param {string} [suffix] + * @returns {string} + */ + basename(path, suffix) { + if (suffix !== undefined) validateString(suffix, "suffix"); + validateString(path, "path"); + let start = 0; + let end = -1; + let matchedSlash = true; + + // Check for a drive letter prefix so as not to mistake the following + // path separator as an extra separator at the end of the path that can be + // disregarded + if ( + path.length >= 2 && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(path, 0)) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + start = 2; + } + + if ( + suffix !== undefined && + suffix.length > 0 && + suffix.length <= path.length + ) { + if (suffix === path) return ""; + let extIdx = suffix.length - 1; + let firstNonSlashEnd = -1; + for (let i = path.length - 1; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === StringPrototypeCharCodeAt(suffix, extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd; + else if (end === -1) end = path.length; + return StringPrototypeSlice(path, start, end); + } + for (let i = path.length - 1; i >= start; --i) { + if (isPathSeparator(StringPrototypeCharCodeAt(path, i))) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ""; + return StringPrototypeSlice(path, start, end); + }, + + /** + * @param {string} path + * @returns {string} + */ + 
extname(path) { + validateString(path, "path"); + let start = 0; + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Check for a drive letter prefix so as not to mistake the following + // path separator as an extra separator at the end of the path that can be + // disregarded + + if ( + path.length >= 2 && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(path, 0)) + ) { + start = startPart = 2; + } + + for (let i = path.length - 1; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if ( + startDot === -1 || + end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return ""; + } + return StringPrototypeSlice(path, startDot, end); + }, + + format: FunctionPrototypeBind(_format, null, "\\"), + + /** + * @param {string} path + * @returns {{ + * dir: string; + * root: string; + * base: string; + * name: string; + * ext: string; + * }} + */ + parse(path) { + validateString(path, "path"); + + const ret = { root: "", dir: "", base: "", ext: "", name: "" }; + if (path.length === 0) return ret; + + const len = path.length; + let rootEnd = 0; + let code = StringPrototypeCharCodeAt(path, 0); + + if (len === 1) { + if (isPathSeparator(code)) { + // `path` contains just a path separator, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + ret.base = ret.name = path; + return ret; + } + // Try to match a root + if (isPathSeparator(code)) { + // Possible UNC root + + rootEnd = 1; + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! 
+ last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len) { + // We matched a UNC root only + rootEnd = j; + } else if (j !== last) { + // We matched a UNC root with leftovers + rootEnd = j + 1; + } + } + } + } + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + // Possible device root + if (len <= 2) { + // `path` contains just a drive root, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + rootEnd = 2; + if (isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + if (len === 3) { + // `path` contains just a drive root, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + rootEnd = 3; + } + } + if (rootEnd > 0) ret.root = StringPrototypeSlice(path, 0, rootEnd); + + let startDot = -1; + let startPart = rootEnd; + let end = -1; + let matchedSlash = true; + let i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Get non-dir info + for (; i >= rootEnd; --i) { + code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (end !== -1) { + if ( + startDot === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && + startDot === end - 1 && + startDot === startPart + 1) + ) { + ret.base = ret.name = StringPrototypeSlice(path, startPart, end); + } else { + ret.name = StringPrototypeSlice(path, startPart, startDot); + ret.base = StringPrototypeSlice(path, startPart, end); + ret.ext = StringPrototypeSlice(path, startDot, end); + } + } + + // If the directory is the root, use the entire root as the `dir` including + // the trailing slash if any (`C:\abc` -> `C:\`). Otherwise, strip out the + // trailing slash (`C:\abc\def` -> `C:\abc`). 
+ if (startPart > 0 && startPart !== rootEnd) + ret.dir = StringPrototypeSlice(path, 0, startPart - 1); + else ret.dir = ret.root; + + return ret; + }, + + matchesGlob(path, pattern) { + return lazyMatchGlobPattern()(path, pattern, true); + }, + + sep: "\\", + delimiter: ";", + win32: null, + posix: null, +}; + +const posixCwd = (() => { + if (isWindows) { + // Converts Windows' backslash path separators to POSIX forward slashes + // and truncates any drive indicator + const regexp = /\\/g; + return () => { + const cwd = StringPrototypeReplace(process.cwd(), regexp, "/"); + return StringPrototypeSlice(cwd, StringPrototypeIndexOf(cwd, "/")); + }; + } + + // We're already on POSIX, no need for any transformations + return () => process.cwd(); +})(); + +const posix = { + /** + * path.resolve([from ...], to) + * @param {...string} args + * @returns {string} + */ + resolve(...args) { + if ( + args.length === 0 || + (args.length === 1 && (args[0] === "" || args[0] === ".")) + ) { + const cwd = posixCwd(); + if (StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH) { + return cwd; + } + } + let resolvedPath = ""; + let resolvedAbsolute = false; + + for (let i = args.length - 1; i >= 0 && !resolvedAbsolute; i--) { + const path = args[i]; + validateString(path, `paths[${i}]`); + + // Skip empty entries + if (path.length === 0) { + continue; + } + + resolvedPath = `${path}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + } + + if (!resolvedAbsolute) { + const cwd = posixCwd(); + resolvedPath = `${cwd}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeString( + resolvedPath, + !resolvedAbsolute, + "/", + isPosixPathSeparator + ); + + if (resolvedAbsolute) { + return `/${resolvedPath}`; + } + return resolvedPath.length > 0 ? resolvedPath : "."; + }, + + /** + * @param {string} path + * @returns {string} + */ + normalize(path) { + validateString(path, "path"); + + if (path.length === 0) return "."; + + const isAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + const trailingSeparator = + StringPrototypeCharCodeAt(path, path.length - 1) === CHAR_FORWARD_SLASH; + + // Normalize the path + path = normalizeString(path, !isAbsolute, "/", isPosixPathSeparator); + + if (path.length === 0) { + if (isAbsolute) return "/"; + return trailingSeparator ? "./" : "."; + } + if (trailingSeparator) path += "/"; + + return isAbsolute ? `/${path}` : path; + }, + + /** + * @param {string} path + * @returns {boolean} + */ + isAbsolute(path) { + validateString(path, "path"); + return ( + path.length > 0 && + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH + ); + }, + + /** + * @param {...string} args + * @returns {string} + */ + join(...args) { + if (args.length === 0) return "."; + + const path = []; + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + validateString(arg, "path"); + if (arg.length > 0) { + path.push(arg); + } + } + + if (path.length === 0) return "."; + + return posix.normalize(ArrayPrototypeJoin(path, "/")); + }, + + /** + * @param {string} from + * @param {string} to + * @returns {string} + */ + relative(from, to) { + validateString(from, "from"); + validateString(to, "to"); + + if (from === to) return ""; + + // Trim leading forward slashes. 
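+    // (Both paths are absolute after resolve(), so the comparison below starts at index 1 to skip the shared leading '/'.)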
+ from = posix.resolve(from); + to = posix.resolve(to); + + if (from === to) return ""; + + const fromStart = 1; + const fromEnd = from.length; + const fromLen = fromEnd - fromStart; + const toStart = 1; + const toLen = to.length - toStart; + + // Compare paths to find the longest common path from root + const length = fromLen < toLen ? fromLen : toLen; + let lastCommonSep = -1; + let i = 0; + for (; i < length; i++) { + const fromCode = StringPrototypeCharCodeAt(from, fromStart + i); + if (fromCode !== StringPrototypeCharCodeAt(to, toStart + i)) break; + else if (fromCode === CHAR_FORWARD_SLASH) lastCommonSep = i; + } + if (i === length) { + if (toLen > length) { + if (StringPrototypeCharCodeAt(to, toStart + i) === CHAR_FORWARD_SLASH) { + // We get here if `from` is the exact base path for `to`. + // For example: from='/foo/bar'; to='/foo/bar/baz' + return StringPrototypeSlice(to, toStart + i + 1); + } + if (i === 0) { + // We get here if `from` is the root + // For example: from='/'; to='/foo' + return StringPrototypeSlice(to, toStart + i); + } + } else if (fromLen > length) { + if ( + StringPrototypeCharCodeAt(from, fromStart + i) === CHAR_FORWARD_SLASH + ) { + // We get here if `to` is the exact base path for `from`. + // For example: from='/foo/bar/baz'; to='/foo/bar' + lastCommonSep = i; + } else if (i === 0) { + // We get here if `to` is the root. + // For example: from='/foo/bar'; to='/' + lastCommonSep = 0; + } + } + } + + let out = ""; + // Generate the relative path based on the path difference between `to` + // and `from`. + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if ( + i === fromEnd || + StringPrototypeCharCodeAt(from, i) === CHAR_FORWARD_SLASH + ) { + out += out.length === 0 ? ".." : "/.."; + } + } + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts. + return `${out}${StringPrototypeSlice(to, toStart + lastCommonSep)}`; + }, + + /** + * @param {string} path + * @returns {string} + */ + toNamespacedPath(path) { + // Non-op on posix systems + return path; + }, + + /** + * @param {string} path + * @returns {string} + */ + dirname(path) { + validateString(path, "path"); + if (path.length === 0) return "."; + const hasRoot = StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + let end = -1; + let matchedSlash = true; + for (let i = path.length - 1; i >= 1; --i) { + if (StringPrototypeCharCodeAt(path, i) === CHAR_FORWARD_SLASH) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) return hasRoot ? 
"/" : "."; + if (hasRoot && end === 1) return "//"; + return StringPrototypeSlice(path, 0, end); + }, + + /** + * @param {string} path + * @param {string} [suffix] + * @returns {string} + */ + basename(path, suffix) { + if (suffix !== undefined) validateString(suffix, "suffix"); + validateString(path, "path"); + + let start = 0; + let end = -1; + let matchedSlash = true; + + if ( + suffix !== undefined && + suffix.length > 0 && + suffix.length <= path.length + ) { + if (suffix === path) return ""; + let extIdx = suffix.length - 1; + let firstNonSlashEnd = -1; + for (let i = path.length - 1; i >= 0; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (code === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === StringPrototypeCharCodeAt(suffix, extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd; + else if (end === -1) end = path.length; + return StringPrototypeSlice(path, start, end); + } + for (let i = path.length - 1; i >= 0; --i) { + if (StringPrototypeCharCodeAt(path, i) === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ""; + return StringPrototypeSlice(path, start, end); + }, + + /** + * @param {string} path + * @returns {string} + */ + extname(path) { + validateString(path, "path"); + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + for (let i = path.length - 1; i >= 0; --i) { + const char = path[i]; + if (char === "/") { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (char === ".") { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if ( + startDot === -1 || + end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' 
+ (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return ""; + } + return StringPrototypeSlice(path, startDot, end); + }, + + format: FunctionPrototypeBind(_format, null, "/"), + + /** + * @param {string} path + * @returns {{ + * dir: string; + * root: string; + * base: string; + * name: string; + * ext: string; + * }} + */ + parse(path) { + validateString(path, "path"); + + const ret = { root: "", dir: "", base: "", ext: "", name: "" }; + if (path.length === 0) return ret; + const isAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + let start; + if (isAbsolute) { + ret.root = "/"; + start = 1; + } else { + start = 0; + } + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + let i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Get non-dir info + for (; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (code === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (end !== -1) { + const start = startPart === 0 && isAbsolute ? 1 : startPart; + if ( + startDot === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && + startDot === end - 1 && + startDot === startPart + 1) + ) { + ret.base = ret.name = StringPrototypeSlice(path, start, end); + } else { + ret.name = StringPrototypeSlice(path, start, startDot); + ret.base = StringPrototypeSlice(path, start, end); + ret.ext = StringPrototypeSlice(path, startDot, end); + } + } + + if (startPart > 0) ret.dir = StringPrototypeSlice(path, 0, startPart - 1); + else if (isAbsolute) ret.dir = "/"; + + return ret; + }, + + matchesGlob(path, pattern) { + return lazyMatchGlobPattern()(path, pattern, false); + }, + + sep: "/", + delimiter: ":", + win32: null, + posix: null, +}; + +posix.win32 = win32.win32 = win32; +posix.posix = win32.posix = posix; + +// Legacy internal API, docs-only deprecated: DEP0080 +win32._makeLong = win32.toNamespacedPath; +posix._makeLong = posix.toNamespacedPath; + +module.exports = isWindows ? 
win32 : posix; diff --git a/.codesandbox/node/perf_hooks.js b/.codesandbox/node/perf_hooks.js new file mode 100644 index 0000000000..3abbec8469 --- /dev/null +++ b/.codesandbox/node/perf_hooks.js @@ -0,0 +1,47 @@ +"use strict"; + +const { ObjectDefineProperty } = primordials; + +const { constants } = internalBinding("performance"); + +const { PerformanceEntry } = require("internal/perf/performance_entry"); +const { PerformanceResourceTiming } = require("internal/perf/resource_timing"); +const { + PerformanceObserver, + PerformanceObserverEntryList, +} = require("internal/perf/observe"); +const { + PerformanceMark, + PerformanceMeasure, +} = require("internal/perf/usertiming"); +const { Performance, performance } = require("internal/perf/performance"); + +const { createHistogram } = require("internal/histogram"); + +const monitorEventLoopDelay = require("internal/perf/event_loop_delay"); +const { + eventLoopUtilization, +} = require("internal/perf/event_loop_utilization"); +const timerify = require("internal/perf/timerify"); + +module.exports = { + Performance, + PerformanceEntry, + PerformanceMark, + PerformanceMeasure, + PerformanceObserver, + PerformanceObserverEntryList, + PerformanceResourceTiming, + monitorEventLoopDelay, + eventLoopUtilization, + timerify, + createHistogram, + performance, +}; + +ObjectDefineProperty(module.exports, "constants", { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, +}); diff --git a/.codesandbox/node/section-links.js b/.codesandbox/node/section-links.js new file mode 100644 index 0000000000..e3a25655b1 --- /dev/null +++ b/.codesandbox/node/section-links.js @@ -0,0 +1,21 @@ +document.addEventListener('DOMContentLoaded', function(event) { + function f(n) { + if (n.nodeType == 1 && n.tagName.match(/^H[1-6]$/)) { + var span = document.createElement('span'); + span.className = 'section-link'; + span.textContent = '\xa0'; + var a = document.createElement('a'); + a.href = '#' + n.parentNode.id; + a.textContent = '\xb6'; + span.appendChild(a); + n.appendChild(span); + } else { + n = n.firstChild; + while (n) { + f(n); + n = n.nextSibling; + } + } + } + f(document.getElementById('sections')); + }, false); \ No newline at end of file diff --git a/.codesandbox/node/string_decoder.js b/.codesandbox/node/string_decoder.js new file mode 100644 index 0000000000..b774af1b40 --- /dev/null +++ b/.codesandbox/node/string_decoder.js @@ -0,0 +1,125 @@ +'use strict'; + +const { + ArrayBufferIsView, + ObjectDefineProperties, + Symbol, + TypedArrayPrototypeSubarray, +} = primordials; + +const { Buffer } = require('buffer'); +const { + kIncompleteCharactersStart, + kIncompleteCharactersEnd, + kMissingBytes, + kBufferedBytes, + kEncodingField, + kSize, + decode, + flush, +} = internalBinding('string_decoder'); +const { + encodingsMap, + normalizeEncoding, +} = require('internal/util'); +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_THIS, + ERR_UNKNOWN_ENCODING, +} = require('internal/errors').codes; + +const kNativeDecoder = Symbol('kNativeDecoder'); + +/** + * StringDecoder provides an interface for efficiently splitting a series of + * buffers into a series of JS strings without breaking apart multibyte + * characters. 
+ * @param {string} [encoding] + */ +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + if (this.encoding === undefined) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + this[kNativeDecoder] = Buffer.alloc(kSize); + this[kNativeDecoder][kEncodingField] = encodingsMap[this.encoding]; +} + +/** + * Returns a decoded string, omitting any incomplete multi-bytes + * characters at the end of the Buffer, or TypedArray, or DataView + * @param {string | Buffer | TypedArray | DataView} buf + * @returns {string} + * @throws {TypeError} Throws when buf is not in one of supported types + */ +StringDecoder.prototype.write = function write(buf) { + if (typeof buf === 'string') + return buf; + if (!ArrayBufferIsView(buf)) + throw new ERR_INVALID_ARG_TYPE('buf', + ['Buffer', 'TypedArray', 'DataView'], + buf); + if (!this[kNativeDecoder]) { + throw new ERR_INVALID_THIS('StringDecoder'); + } + return decode(this[kNativeDecoder], buf); +}; + +/** + * Returns any remaining input stored in the internal buffer as a string. + * After end() is called, the stringDecoder object can be reused for new + * input. + * @param {string | Buffer | TypedArray | DataView} [buf] + * @returns {string} + */ +StringDecoder.prototype.end = function end(buf) { + const ret = buf === undefined ? '' : this.write(buf); + if (this[kNativeDecoder][kBufferedBytes] > 0) + return ret + flush(this[kNativeDecoder]); + return ret; +}; + +/* Everything below this line is undocumented legacy stuff. */ +/** + * + * @param {string | Buffer | TypedArray | DataView} buf + * @param {number} offset + * @returns {string} + */ +StringDecoder.prototype.text = function text(buf, offset) { + this[kNativeDecoder][kMissingBytes] = 0; + this[kNativeDecoder][kBufferedBytes] = 0; + return this.write(buf.slice(offset)); +}; + +ObjectDefineProperties(StringDecoder.prototype, { + lastChar: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return TypedArrayPrototypeSubarray(this[kNativeDecoder], + kIncompleteCharactersStart, + kIncompleteCharactersEnd); + }, + }, + lastNeed: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return this[kNativeDecoder][kMissingBytes]; + }, + }, + lastTotal: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return this[kNativeDecoder][kBufferedBytes] + + this[kNativeDecoder][kMissingBytes]; + }, + }, +}); + +exports.StringDecoder = StringDecoder; \ No newline at end of file diff --git a/.codesandbox/node/task_processor.js b/.codesandbox/node/task_processor.js new file mode 100644 index 0000000000..ebb455eee0 --- /dev/null +++ b/.codesandbox/node/task_processor.js @@ -0,0 +1,91 @@ +const { parentPort } = require('node:worker_threads'); +parentPort.on('message', (task) => { + parentPort.postMessage(task.a + task.b); +}); +const { AsyncResource } = require('node:async_hooks'); +const { EventEmitter } = require('node:events'); +const path = require('node:path'); +const { Worker } = require('node:worker_threads'); + +const kTaskInfo = Symbol('kTaskInfo'); +const kWorkerFreedEvent = Symbol('kWorkerFreedEvent'); + +class WorkerPoolTaskInfo extends AsyncResource { + constructor(callback) { + super('WorkerPoolTaskInfo'); + this.callback = callback; + } + + done(err, result) { + this.runInAsyncScope(this.callback, null, err, result); + this.emitDestroy(); // `TaskInfo`s are used only once. 
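+    // (emitDestroy tells async_hooks that this resource's lifetime has ended.)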
+ } +} + +class WorkerPool extends EventEmitter { + constructor(numThreads) { + super(); + this.numThreads = numThreads; + this.workers = []; + this.freeWorkers = []; + this.tasks = []; + + for (let i = 0; i < numThreads; i++) + this.addNewWorker(); + + // Any time the kWorkerFreedEvent is emitted, dispatch + // the next task pending in the queue, if any. + this.on(kWorkerFreedEvent, () => { + if (this.tasks.length > 0) { + const { task, callback } = this.tasks.shift(); + this.runTask(task, callback); + } + }); + } + + addNewWorker() { + const worker = new Worker(path.resolve(__dirname, 'task_processor.js')); + worker.on('message', (result) => { + // In case of success: Call the callback that was passed to `runTask`, + // remove the `TaskInfo` associated with the Worker, and mark it as free + // again. + worker[kTaskInfo].done(null, result); + worker[kTaskInfo] = null; + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + }); + worker.on('error', (err) => { + // In case of an uncaught exception: Call the callback that was passed to + // `runTask` with the error. + if (worker[kTaskInfo]) + worker[kTaskInfo].done(err, null); + else + this.emit('error', err); + // Remove the worker from the list and start a new Worker to replace the + // current one. + this.workers.splice(this.workers.indexOf(worker), 1); + this.addNewWorker(); + }); + this.workers.push(worker); + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + } + + runTask(task, callback) { + if (this.freeWorkers.length === 0) { + // No free threads, wait until a worker thread becomes free. + this.tasks.push({ task, callback }); + return; + } + + const worker = this.freeWorkers.pop(); + worker[kTaskInfo] = new WorkerPoolTaskInfo(callback); + worker.postMessage(task); + } + + close() { + for (const worker of this.workers) worker.terminate(); + } +} + +module.exports = WorkerPool; \ No newline at end of file diff --git a/.codesandbox/node/tls.js b/.codesandbox/node/tls.js new file mode 100644 index 0000000000..f412d811a3 --- /dev/null +++ b/.codesandbox/node/tls.js @@ -0,0 +1,443 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + // eslint-disable-next-line no-restricted-syntax + ArrayPrototypePush, + JSONParse, + ObjectDefineProperty, + ObjectFreeze, + StringFromCharCode, +} = primordials; + +const { + ERR_TLS_CERT_ALTNAME_FORMAT, + ERR_TLS_CERT_ALTNAME_INVALID, + ERR_OUT_OF_RANGE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, +} = require('internal/errors').codes; + +const { + getBundledRootCertificates, + getExtraCACertificates, + getSystemCACertificates, + resetRootCertStore, + getUserRootCertificates, + getSSLCiphers, + startLoadingCertificatesOffThread, +} = internalBinding('crypto'); + +// Start loading root certificates in a separate thread as early as possible +// once the tls module is loaded, so that by the time an actual TLS connection is +// made, the loading is done. 
+startLoadingCertificatesOffThread(); + +const internalUtil = require('internal/util'); +internalUtil.assertCrypto(); +const { + isArrayBufferView, + isUint8Array, +} = require('internal/util/types'); + +const net = require('net'); +const { getOptionValue } = require('internal/options'); +const { Buffer } = require('buffer'); +const { canonicalizeIP } = internalBinding('cares_wrap'); +const _tls_common = require('_tls_common'); +const _tls_wrap = require('_tls_wrap'); +const { validateString } = require('internal/validators'); + +const { + namespace: { + addDeserializeCallback, + addSerializeCallback, + isBuildingSnapshot, + }, +} = require('internal/v8/startup_snapshot'); + +// Allow {CLIENT_RENEG_LIMIT} client-initiated session renegotiations +// every {CLIENT_RENEG_WINDOW} seconds. An error event is emitted if more +// renegotiations are seen. The settings are applied to all remote client +// connections. +exports.CLIENT_RENEG_LIMIT = 3; +exports.CLIENT_RENEG_WINDOW = 600; + +exports.DEFAULT_CIPHERS = getOptionValue('--tls-cipher-list'); + +exports.DEFAULT_ECDH_CURVE = 'auto'; + +if (getOptionValue('--tls-min-v1.0')) + exports.DEFAULT_MIN_VERSION = 'TLSv1'; +else if (getOptionValue('--tls-min-v1.1')) + exports.DEFAULT_MIN_VERSION = 'TLSv1.1'; +else if (getOptionValue('--tls-min-v1.2')) + exports.DEFAULT_MIN_VERSION = 'TLSv1.2'; +else if (getOptionValue('--tls-min-v1.3')) + exports.DEFAULT_MIN_VERSION = 'TLSv1.3'; +else + exports.DEFAULT_MIN_VERSION = 'TLSv1.2'; + +if (getOptionValue('--tls-max-v1.3')) + exports.DEFAULT_MAX_VERSION = 'TLSv1.3'; +else if (getOptionValue('--tls-max-v1.2')) + exports.DEFAULT_MAX_VERSION = 'TLSv1.2'; +else + exports.DEFAULT_MAX_VERSION = 'TLSv1.3'; // Will depend on node version. + + +exports.getCiphers = internalUtil.cachedResult( + () => internalUtil.filterDuplicateStrings(getSSLCiphers(), true), +); + +let bundledRootCertificates; +function cacheBundledRootCertificates() { + bundledRootCertificates ||= ObjectFreeze(getBundledRootCertificates()); + + return bundledRootCertificates; +} + +ObjectDefineProperty(exports, 'rootCertificates', { + __proto__: null, + configurable: false, + enumerable: true, + get: cacheBundledRootCertificates, +}); + +let extraCACertificates; +function cacheExtraCACertificates() { + extraCACertificates ||= ObjectFreeze(getExtraCACertificates()); + + return extraCACertificates; +} + +let systemCACertificates; +function cacheSystemCACertificates() { + systemCACertificates ||= ObjectFreeze(getSystemCACertificates()); + + return systemCACertificates; +} + +let defaultCACertificates; +let hasResetDefaultCACertificates = false; + +function cacheDefaultCACertificates() { + if (defaultCACertificates) { return defaultCACertificates; } + + if (hasResetDefaultCACertificates) { + defaultCACertificates = getUserRootCertificates(); + ObjectFreeze(defaultCACertificates); + return defaultCACertificates; + } + + defaultCACertificates = []; + + if (!getOptionValue('--use-openssl-ca')) { + const bundled = cacheBundledRootCertificates(); + for (let i = 0; i < bundled.length; ++i) { + ArrayPrototypePush(defaultCACertificates, bundled[i]); + } + if (getOptionValue('--use-system-ca')) { + const system = cacheSystemCACertificates(); + for (let i = 0; i < system.length; ++i) { + + ArrayPrototypePush(defaultCACertificates, system[i]); + } + } + } + + if (process.env.NODE_EXTRA_CA_CERTS) { + const extra = cacheExtraCACertificates(); + for (let i = 0; i < extra.length; ++i) { + + ArrayPrototypePush(defaultCACertificates, extra[i]); + } + } + + 
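+  // Freeze the assembled list so the cached default CA set cannot be mutated by callers.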
ObjectFreeze(defaultCACertificates); + return defaultCACertificates; +} + +// TODO(joyeecheung): support X509Certificate output? +function getCACertificates(type = 'default') { + validateString(type, 'type'); + + switch (type) { + case 'default': + return cacheDefaultCACertificates(); + case 'bundled': + return cacheBundledRootCertificates(); + case 'system': + return cacheSystemCACertificates(); + case 'extra': + return cacheExtraCACertificates(); + default: + throw new ERR_INVALID_ARG_VALUE('type', type); + } +} +exports.getCACertificates = getCACertificates; + +function setDefaultCACertificates(certs) { + if (!ArrayIsArray(certs)) { + throw new ERR_INVALID_ARG_TYPE('certs', 'Array', certs); + } + + // Verify that all elements in the array are strings + for (let i = 0; i < certs.length; i++) { + if (typeof certs[i] !== 'string' && !isArrayBufferView(certs[i])) { + throw new ERR_INVALID_ARG_TYPE( + `certs[${i}]`, ['string', 'ArrayBufferView'], certs[i]); + } + } + + resetRootCertStore(certs); + defaultCACertificates = undefined; // Reset the cached default certificates + hasResetDefaultCACertificates = true; +} + +exports.setDefaultCACertificates = setDefaultCACertificates; + +if (isBuildingSnapshot()) { + addSerializeCallback(() => { + // Clear the cached certs so that they are reloaded at runtime. + // Bundled certificates are immutable so they are spared. + extraCACertificates = undefined; + systemCACertificates = undefined; + if (hasResetDefaultCACertificates) { + defaultCACertificates = undefined; + } + }); + addDeserializeCallback(() => { + // If the tls module is loaded during snapshotting, load the certificates from + // various sources again at runtime so that by the time an actual TLS connection is + // made, the loading is done. If the default CA certificates have been overridden, then + // the serialized overriding certificates are likely to be used and pre-loading + // from the sources would probably not yield any benefit, so skip it. + if (!hasResetDefaultCACertificates) { + startLoadingCertificatesOffThread(); + } + }); +} + +// Convert protocols array into valid OpenSSL protocols list +// ("\x06spdy/2\x08http/1.1\x08http/1.0") +function convertProtocols(protocols) { + const lens = new Array(protocols.length); + const buff = Buffer.allocUnsafe(protocols.reduce((p, c, i) => { + const len = Buffer.byteLength(c); + if (len > 255) { + throw new ERR_OUT_OF_RANGE('The byte length of the protocol at index ' + + `${i} exceeds the maximum length.`, '<= 255', len, true); + } + lens[i] = len; + return p + 1 + len; + }, 0)); + + let offset = 0; + for (let i = 0, c = protocols.length; i < c; i++) { + buff[offset++] = lens[i]; + buff.write(protocols[i], offset); + offset += lens[i]; + } + + return buff; +} + +exports.convertALPNProtocols = function convertALPNProtocols(protocols, out) { + // If protocols is Array - translate it into buffer + if (ArrayIsArray(protocols)) { + out.ALPNProtocols = convertProtocols(protocols); + } else if (isUint8Array(protocols)) { + // Copy new buffer not to be modified by user. + out.ALPNProtocols = Buffer.from(protocols); + } else if (isArrayBufferView(protocols)) { + out.ALPNProtocols = Buffer.from(protocols.buffer.slice( + protocols.byteOffset, + protocols.byteOffset + protocols.byteLength, + )); + } +}; + +function unfqdn(host) { + return host.replace(/[.]$/, ''); +} + +// String#toLowerCase() is locale-sensitive so we use +// a conservative version that only lowercases A-Z. 
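+// (In ASCII, a lowercase letter's code point is exactly 32 greater than its uppercase counterpart, which is what the helper below relies on.)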
+function toLowerCase(c) { + return StringFromCharCode(32 + c.charCodeAt(0)); +} + +function splitHost(host) { + return unfqdn(host).replace(/[A-Z]/g, toLowerCase).split('.'); +} + +function check(hostParts, pattern, wildcards) { + // Empty strings, null, undefined, etc. never match. + if (!pattern) + return false; + + const patternParts = splitHost(pattern); + + if (hostParts.length !== patternParts.length) + return false; + + // Pattern has empty components, e.g. "bad..example.com". + if (patternParts.includes('')) + return false; + + // RFC 6125 allows IDNA U-labels (Unicode) in names but we have no + // good way to detect their encoding or normalize them so we simply + // reject them. Control characters and blanks are rejected as well + // because nothing good can come from accepting them. + const isBad = (s) => /[^\u0021-\u007F]/u.test(s); + if (patternParts.some(isBad)) + return false; + + // Check host parts from right to left first. + for (let i = hostParts.length - 1; i > 0; i -= 1) { + if (hostParts[i] !== patternParts[i]) + return false; + } + + const hostSubdomain = hostParts[0]; + const patternSubdomain = patternParts[0]; + const patternSubdomainParts = patternSubdomain.split('*', 3); + + // Short-circuit when the subdomain does not contain a wildcard. + // RFC 6125 does not allow wildcard substitution for components + // containing IDNA A-labels (Punycode) so match those verbatim. + if (patternSubdomainParts.length === 1 || + patternSubdomain.includes('xn--')) + return hostSubdomain === patternSubdomain; + + if (!wildcards) + return false; + + // More than one wildcard is always wrong. + if (patternSubdomainParts.length > 2) + return false; + + // *.tld wildcards are not allowed. + if (patternParts.length <= 2) + return false; + + const { 0: prefix, 1: suffix } = patternSubdomainParts; + + if (prefix.length + suffix.length > hostSubdomain.length) + return false; + + if (!hostSubdomain.startsWith(prefix)) + return false; + + if (!hostSubdomain.endsWith(suffix)) + return false; + + return true; +} + +// This pattern is used to determine the length of escaped sequences within +// the subject alt names string. It allows any valid JSON string literal. +// This MUST match the JSON specification (ECMA-404 / RFC8259) exactly. +const jsonStringPattern = + // eslint-disable-next-line no-control-regex + /^"(?:[^"\\\u0000-\u001f]|\\(?:["\\/bfnrt]|u[0-9a-fA-F]{4}))*"/; + +function splitEscapedAltNames(altNames) { + const result = []; + let currentToken = ''; + let offset = 0; + while (offset !== altNames.length) { + const nextSep = altNames.indexOf(',', offset); + const nextQuote = altNames.indexOf('"', offset); + if (nextQuote !== -1 && (nextSep === -1 || nextQuote < nextSep)) { + // There is a quote character and there is no separator before the quote. + currentToken += altNames.substring(offset, nextQuote); + const match = jsonStringPattern.exec(altNames.substring(nextQuote)); + if (!match) { + throw new ERR_TLS_CERT_ALTNAME_FORMAT(); + } + currentToken += JSONParse(match[0]); + offset = nextQuote + match[0].length; + } else if (nextSep !== -1) { + // There is a separator and no quote before it. 
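+      // Entries in the altnames string are separated by the two-character sequence ', ', hence the offset advances past 2 characters below.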
+ currentToken += altNames.substring(offset, nextSep); + result.push(currentToken); + currentToken = ''; + offset = nextSep + 2; + } else { + currentToken += altNames.substring(offset); + offset = altNames.length; + } + } + result.push(currentToken); + return result; +} + +exports.checkServerIdentity = function checkServerIdentity(hostname, cert) { + const subject = cert.subject; + const altNames = cert.subjectaltname; + const dnsNames = []; + const ips = []; + + hostname = '' + hostname; + + if (altNames) { + const splitAltNames = altNames.includes('"') ? + splitEscapedAltNames(altNames) : + altNames.split(', '); + splitAltNames.forEach((name) => { + if (name.startsWith('DNS:')) { + dnsNames.push(name.slice(4)); + } else if (name.startsWith('IP Address:')) { + ips.push(canonicalizeIP(name.slice(11))); + } + }); + } + + let valid = false; + let reason = 'Unknown reason'; + + hostname = unfqdn(hostname); // Remove trailing dot for error messages. + + if (net.isIP(hostname)) { + valid = ips.includes(canonicalizeIP(hostname)); + if (!valid) + reason = `IP: ${hostname} is not in the cert's list: ` + ips.join(', '); + } else if (dnsNames.length > 0 || subject?.CN) { + const hostParts = splitHost(hostname); + const wildcard = (pattern) => check(hostParts, pattern, true); + + if (dnsNames.length > 0) { + valid = dnsNames.some(wildcard); + if (!valid) + reason = + `Host: ${hostname}. is not in the cert's altnames: ${altNames}`; + } else { + // Match against Common Name only if no supported identifiers exist. + const cn = subject.CN; + + if (ArrayIsArray(cn)) + valid = cn.some(wildcard); + else if (cn) + valid = wildcard(cn); + + if (!valid) + reason = `Host: ${hostname}. is not cert's CN: ${cn}`; + } + } else { + reason = 'Cert does not contain a DNS name'; + } + + if (!valid) { + return new ERR_TLS_CERT_ALTNAME_INVALID(reason, hostname, cert); + } +}; + +exports.createSecureContext = _tls_common.createSecureContext; +exports.SecureContext = _tls_common.SecureContext; +exports.TLSSocket = _tls_wrap.TLSSocket; +exports.Server = _tls_wrap.Server; +exports.createServer = _tls_wrap.createServer; +exports.connect = _tls_wrap.connect; \ No newline at end of file diff --git a/.codesandbox/node/trace_events.js b/.codesandbox/node/trace_events.js new file mode 100644 index 0000000000..0938072289 --- /dev/null +++ b/.codesandbox/node/trace_events.js @@ -0,0 +1,96 @@ +'use strict'; + +const { + ArrayPrototypeJoin, + SafeSet, +} = primordials; + +const { hasTracing } = internalBinding('config'); + +const kMaxTracingCount = 10; + +const { + ERR_TRACE_EVENTS_CATEGORY_REQUIRED, + ERR_TRACE_EVENTS_UNAVAILABLE, +} = require('internal/errors').codes; + +const { ownsProcessState } = require('internal/worker'); +if (!hasTracing || !ownsProcessState) + throw new ERR_TRACE_EVENTS_UNAVAILABLE(); + +const { CategorySet, getEnabledCategories } = internalBinding('trace_events'); +const { customInspectSymbol } = require('internal/util'); +const { format } = require('internal/util/inspect'); +const { + validateObject, + validateStringArray, +} = require('internal/validators'); + +const enabledTracingObjects = new SafeSet(); + +class Tracing { + #handle; + #categories; + #enabled = false; + + constructor(categories) { + this.#handle = new CategorySet(categories); + this.#categories = categories; + } + + enable() { + if (!this.#enabled) { + this.#enabled = true; + this.#handle.enable(); + enabledTracingObjects.add(this); + if (enabledTracingObjects.size > kMaxTracingCount) { + process.emitWarning( + 'Possible trace_events 
memory leak detected. There are more than ' + + `${kMaxTracingCount} enabled Tracing objects.`, + ); + } + } + } + + disable() { + if (this.#enabled) { + this.#enabled = false; + this.#handle.disable(); + enabledTracingObjects.delete(this); + } + } + + get enabled() { + return this.#enabled; + } + + get categories() { + return ArrayPrototypeJoin(this.#categories, ','); + } + + [customInspectSymbol](depth, opts) { + if (typeof depth === 'number' && depth < 0) + return this; + + const obj = { + enabled: this.enabled, + categories: this.categories, + }; + return `Tracing ${format(obj)}`; + } +} + +function createTracing(options) { + validateObject(options, 'options'); + validateStringArray(options.categories, 'options.categories'); + + if (options.categories.length <= 0) + throw new ERR_TRACE_EVENTS_CATEGORY_REQUIRED(); + + return new Tracing(options.categories); +} + +module.exports = { + createTracing, + getEnabledCategories, +}; \ No newline at end of file diff --git a/.codesandbox/node/tty.js b/.codesandbox/node/tty.js new file mode 100644 index 0000000000..f9275fab29 --- /dev/null +++ b/.codesandbox/node/tty.js @@ -0,0 +1,147 @@ +'use strict'; + +const { + NumberIsInteger, + ObjectSetPrototypeOf, +} = primordials; + +const net = require('net'); +const { TTY, isTTY } = internalBinding('tty_wrap'); +const { + ErrnoException, + codes: { + ERR_INVALID_FD, + ERR_TTY_INIT_FAILED, + }, +} = require('internal/errors'); +const { + getColorDepth, + hasColors, +} = require('internal/tty'); + +// Lazy loaded for startup performance. +let readline; + +function isatty(fd) { + return NumberIsInteger(fd) && fd >= 0 && fd <= 2147483647 && + isTTY(fd); +} + +function ReadStream(fd, options) { + if (!(this instanceof ReadStream)) + return new ReadStream(fd, options); + if (fd >> 0 !== fd || fd < 0) + throw new ERR_INVALID_FD(fd); + + const ctx = {}; + const tty = new TTY(fd, ctx); + if (ctx.code !== undefined) { + throw new ERR_TTY_INIT_FAILED(ctx); + } + + net.Socket.call(this, { + readableHighWaterMark: 0, + handle: tty, + manualStart: true, + ...options, + }); + + this.isRaw = false; + this.isTTY = true; +} + +ObjectSetPrototypeOf(ReadStream.prototype, net.Socket.prototype); +ObjectSetPrototypeOf(ReadStream, net.Socket); + +ReadStream.prototype.setRawMode = function(flag) { + flag = !!flag; + const err = this._handle?.setRawMode(flag); + if (err) { + this.emit('error', new ErrnoException(err, 'setRawMode')); + return this; + } + this.isRaw = flag; + return this; +}; + +function WriteStream(fd) { + if (!(this instanceof WriteStream)) + return new WriteStream(fd); + if (fd >> 0 !== fd || fd < 0) + throw new ERR_INVALID_FD(fd); + + const ctx = {}; + const tty = new TTY(fd, ctx); + if (ctx.code !== undefined) { + throw new ERR_TTY_INIT_FAILED(ctx); + } + + net.Socket.call(this, { + readableHighWaterMark: 0, + handle: tty, + manualStart: true, + }); + + // Prevents interleaved or dropped stdout/stderr output for terminals. + // As noted in the following reference, local TTYs tend to be quite fast and + // this behavior has become expected due historical functionality on OS X, + // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). 
+ // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 + this._handle.setBlocking(true); + + const winSize = [0, 0]; + const err = this._handle.getWindowSize(winSize); + if (!err) { + this.columns = winSize[0]; + this.rows = winSize[1]; + } +} + +ObjectSetPrototypeOf(WriteStream.prototype, net.Socket.prototype); +ObjectSetPrototypeOf(WriteStream, net.Socket); + +WriteStream.prototype.isTTY = true; + +WriteStream.prototype.getColorDepth = getColorDepth; + +WriteStream.prototype.hasColors = hasColors; + +WriteStream.prototype._refreshSize = function() { + const oldCols = this.columns; + const oldRows = this.rows; + const winSize = [0, 0]; + const err = this._handle.getWindowSize(winSize); + if (err) { + this.emit('error', new ErrnoException(err, 'getWindowSize')); + return; + } + const { 0: newCols, 1: newRows } = winSize; + if (oldCols !== newCols || oldRows !== newRows) { + this.columns = newCols; + this.rows = newRows; + this.emit('resize'); + } +}; + +// Backwards-compat +WriteStream.prototype.cursorTo = function(x, y, callback) { + if (readline === undefined) readline = require('readline'); + return readline.cursorTo(this, x, y, callback); +}; +WriteStream.prototype.moveCursor = function(dx, dy, callback) { + if (readline === undefined) readline = require('readline'); + return readline.moveCursor(this, dx, dy, callback); +}; +WriteStream.prototype.clearLine = function(dir, callback) { + if (readline === undefined) readline = require('readline'); + return readline.clearLine(this, dir, callback); +}; +WriteStream.prototype.clearScreenDown = function(callback) { + if (readline === undefined) readline = require('readline'); + return readline.clearScreenDown(this, callback); +}; +WriteStream.prototype.getWindowSize = function() { + return [this.columns, this.rows]; +}; + +module.exports = { isatty, ReadStream, WriteStream }; \ No newline at end of file diff --git a/.codesandbox/node/url.js b/.codesandbox/node/url.js new file mode 100644 index 0000000000..7e0f36ee6a --- /dev/null +++ b/.codesandbox/node/url.js @@ -0,0 +1,1028 @@ +'use strict'; + +const { + ArrayPrototypeJoin, + Boolean, + Int8Array, + ObjectAssign, + ObjectKeys, + StringPrototypeAt, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeReplaceAll, + StringPrototypeSlice, + decodeURIComponent, +} = primordials; + +const { URLPattern } = internalBinding('url_pattern'); +const { toASCII } = internalBinding('encoding_binding'); +const { encodeStr, hexTable } = require('internal/querystring'); +const querystring = require('querystring'); + +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_URL, +} = require('internal/errors').codes; +const { + validateString, + validateObject, +} = require('internal/validators'); + +// This ensures setURLConstructor() is called before the native +// URL::ToObject() method is used. 
+const { spliceOne } = require('internal/util'); +const { isInsideNodeModules } = internalBinding('util'); + +// WHATWG URL implementation provided by internal/url +const { + URL, + URLSearchParams, + domainToASCII, + domainToUnicode, + fileURLToPath, + fileURLToPathBuffer, + pathToFileURL: _pathToFileURL, + urlToHttpOptions, + unsafeProtocol, + hostlessProtocol, + slashedProtocol, +} = require('internal/url'); + +const bindingUrl = internalBinding('url'); + +// Original url.parse() API + +function Url() { + this.protocol = null; + this.slashes = null; + this.auth = null; + this.host = null; + this.port = null; + this.hostname = null; + this.hash = null; + this.search = null; + this.query = null; + this.pathname = null; + this.path = null; + this.href = null; +} + +// Reference: RFC 3986, RFC 1808, RFC 2396 + +// define these here so at least they only have to be +// compiled once on the first module load. +const protocolPattern = /^[a-z0-9.+-]+:/i; +const portPattern = /:[0-9]*$/; +const hostPattern = /^\/\/[^@/]+@[^@/]+/; + +// Special case for a simple path URL +const simplePathPattern = /^(\/\/?(?!\/)[^?\s]*)(\?[^\s]*)?$/; + +const hostnameMaxLen = 255; +const { + CHAR_SPACE, + CHAR_TAB, + CHAR_CARRIAGE_RETURN, + CHAR_LINE_FEED, + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE, + CHAR_HASH, + CHAR_FORWARD_SLASH, + CHAR_LEFT_SQUARE_BRACKET, + CHAR_RIGHT_SQUARE_BRACKET, + CHAR_LEFT_ANGLE_BRACKET, + CHAR_RIGHT_ANGLE_BRACKET, + CHAR_LEFT_CURLY_BRACKET, + CHAR_RIGHT_CURLY_BRACKET, + CHAR_QUESTION_MARK, + CHAR_DOUBLE_QUOTE, + CHAR_SINGLE_QUOTE, + CHAR_PERCENT, + CHAR_SEMICOLON, + CHAR_BACKWARD_SLASH, + CHAR_CIRCUMFLEX_ACCENT, + CHAR_GRAVE_ACCENT, + CHAR_VERTICAL_LINE, + CHAR_AT, + CHAR_COLON, +} = require('internal/constants'); + +let urlParseWarned = false; + +function urlParse(url, parseQueryString, slashesDenoteHost) { + if (!urlParseWarned && !isInsideNodeModules(100, true)) { + urlParseWarned = true; + process.emitWarning( + '`url.parse()` behavior is not standardized and prone to ' + + 'errors that have security implications. Use the WHATWG URL API ' + + 'instead. CVEs are not issued for `url.parse()` vulnerabilities.', + 'DeprecationWarning', + 'DEP0169', + ); + } + + if (url instanceof Url) return url; + + const urlObject = new Url(); + urlObject.parse(url, parseQueryString, slashesDenoteHost); + return urlObject; +} + +function isIpv6Hostname(hostname) { + return ( + StringPrototypeCharCodeAt(hostname, 0) === CHAR_LEFT_SQUARE_BRACKET && + StringPrototypeCharCodeAt(hostname, hostname.length - 1) === + CHAR_RIGHT_SQUARE_BRACKET + ); +} + +// This prevents some common spoofing bugs due to our use of IDNA toASCII. For +// compatibility, the set of characters we use here is the *intersection* of +// "forbidden host code point" in the WHATWG URL Standard [1] and the +// characters in the host parsing loop in Url.prototype.parse, with the +// following additions: +// +// - ':' since this could cause a "protocol spoofing" bug +// - '@' since this could cause parts of the hostname to be confused with auth +// - '[' and ']' since this could cause a non-IPv6 hostname to be interpreted +// as IPv6 by isIpv6Hostname above +// +// [1]: https://url.spec.whatwg.org/#forbidden-host-code-point +const forbiddenHostChars = /[\0\t\n\r #%/:<>?@[\\\]^|]/; +// For IPv6, permit '[', ']', and ':'. 
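+// (i.e. the set below is forbiddenHostChars with ':', '[' and ']' removed)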
+const forbiddenHostCharsIpv6 = /[\0\t\n\r #%/<>?@\\^|]/; + +Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { + validateString(url, 'url'); + + // Copy chrome, IE, opera backslash-handling behavior. + // Back slashes before the query string get converted to forward slashes + // See: https://code.google.com/p/chromium/issues/detail?id=25916 + let hasHash = false; + let hasAt = false; + let start = -1; + let end = -1; + let rest = ''; + let lastPos = 0; + for (let i = 0, inWs = false, split = false; i < url.length; ++i) { + const code = url.charCodeAt(i); + + // Find first and last non-whitespace characters for trimming + const isWs = code < 33 || + code === CHAR_NO_BREAK_SPACE || + code === CHAR_ZERO_WIDTH_NOBREAK_SPACE; + if (start === -1) { + if (isWs) + continue; + lastPos = start = i; + } else if (inWs) { + if (!isWs) { + end = -1; + inWs = false; + } + } else if (isWs) { + end = i; + inWs = true; + } + + // Only convert backslashes while we haven't seen a split character + if (!split) { + switch (code) { + case CHAR_AT: + hasAt = true; + break; + case CHAR_HASH: + hasHash = true; + // Fall through + case CHAR_QUESTION_MARK: + split = true; + break; + case CHAR_BACKWARD_SLASH: + if (i - lastPos > 0) + rest += url.slice(lastPos, i); + rest += '/'; + lastPos = i + 1; + break; + } + } else if (!hasHash && code === CHAR_HASH) { + hasHash = true; + } + } + + // Check if string was non-empty (including strings with only whitespace) + if (start !== -1) { + if (lastPos === start) { + // We didn't convert any backslashes + + if (end === -1) { + if (start === 0) + rest = url; + else + rest = url.slice(start); + } else { + rest = url.slice(start, end); + } + } else if (end === -1 && lastPos < url.length) { + // We converted some backslashes and have only part of the entire string + rest += url.slice(lastPos); + } else if (end !== -1 && lastPos < end) { + // We converted some backslashes and have only part of the entire string + rest += url.slice(lastPos, end); + } + } + + if (!slashesDenoteHost && !hasHash && !hasAt) { + // Try fast path regexp + const simplePath = simplePathPattern.exec(rest); + if (simplePath) { + this.path = rest; + this.href = rest; + this.pathname = simplePath[1]; + if (simplePath[2]) { + this.search = simplePath[2]; + if (parseQueryString) { + this.query = querystring.parse(this.search.slice(1)); + } else { + this.query = this.search.slice(1); + } + } else if (parseQueryString) { + this.search = null; + this.query = { __proto__: null }; + } + return this; + } + } + + let proto = protocolPattern.exec(rest); + let lowerProto; + if (proto) { + proto = proto[0]; + lowerProto = proto.toLowerCase(); + this.protocol = lowerProto; + rest = rest.slice(proto.length); + } + + // Figure out if it's got a host + // user@server is *always* interpreted as a hostname, and url + // resolution will treat //foo/bar as host=foo,path=bar because that's + // how the browser resolves relative URLs. + let slashes; + if (slashesDenoteHost || proto || hostPattern.test(rest)) { + slashes = rest.charCodeAt(0) === CHAR_FORWARD_SLASH && + rest.charCodeAt(1) === CHAR_FORWARD_SLASH; + if (slashes && !(proto && hostlessProtocol.has(lowerProto))) { + rest = rest.slice(2); + this.slashes = true; + } + } + + if (!hostlessProtocol.has(lowerProto) && + (slashes || (proto && !slashedProtocol.has(proto)))) { + + // there's a hostname. + // the first instance of /, ?, ;, or # ends the host. 
+ // + // If there is an @ in the hostname, then non-host chars *are* allowed + // to the left of the last @ sign, unless some host-ending character + // comes *before* the @-sign. + // URLs are obnoxious. + // + // ex: + // http://a@b@c/ => user:a@b host:c + // http://a@b?@c => user:a host:b path:/?@c + + let hostEnd = -1; + let atSign = -1; + let nonHost = -1; + for (let i = 0; i < rest.length; ++i) { + switch (rest.charCodeAt(i)) { + case CHAR_TAB: + case CHAR_LINE_FEED: + case CHAR_CARRIAGE_RETURN: + // WHATWG URL removes tabs, newlines, and carriage returns. Let's do that too. + rest = rest.slice(0, i) + rest.slice(i + 1); + i -= 1; + break; + case CHAR_SPACE: + case CHAR_DOUBLE_QUOTE: + case CHAR_PERCENT: + case CHAR_SINGLE_QUOTE: + case CHAR_SEMICOLON: + case CHAR_LEFT_ANGLE_BRACKET: + case CHAR_RIGHT_ANGLE_BRACKET: + case CHAR_BACKWARD_SLASH: + case CHAR_CIRCUMFLEX_ACCENT: + case CHAR_GRAVE_ACCENT: + case CHAR_LEFT_CURLY_BRACKET: + case CHAR_VERTICAL_LINE: + case CHAR_RIGHT_CURLY_BRACKET: + // Characters that are never ever allowed in a hostname from RFC 2396 + if (nonHost === -1) + nonHost = i; + break; + case CHAR_HASH: + case CHAR_FORWARD_SLASH: + case CHAR_QUESTION_MARK: + // Find the first instance of any host-ending characters + if (nonHost === -1) + nonHost = i; + hostEnd = i; + break; + case CHAR_AT: + // At this point, either we have an explicit point where the + // auth portion cannot go past, or the last @ char is the decider. + atSign = i; + nonHost = -1; + break; + } + if (hostEnd !== -1) + break; + } + start = 0; + if (atSign !== -1) { + this.auth = decodeURIComponent(rest.slice(0, atSign)); + start = atSign + 1; + } + if (nonHost === -1) { + this.host = rest.slice(start); + rest = ''; + } else { + this.host = rest.slice(start, nonHost); + rest = rest.slice(nonHost); + } + + // pull out port. + this.parseHost(); + + // We've indicated that there is a hostname, + // so even if it's empty, it has to be present. + if (typeof this.hostname !== 'string') + this.hostname = ''; + + const hostname = this.hostname; + + // If hostname begins with [ and ends with ] + // assume that it's an IPv6 address. + const ipv6Hostname = isIpv6Hostname(hostname); + + // validate a little. + if (!ipv6Hostname) { + rest = getHostname(this, rest, hostname, url); + } + + if (this.hostname.length > hostnameMaxLen) { + this.hostname = ''; + } else { + // Hostnames are always lower case. + this.hostname = this.hostname.toLowerCase(); + } + + if (this.hostname !== '') { + if (ipv6Hostname) { + if (forbiddenHostCharsIpv6.test(this.hostname)) { + throw new ERR_INVALID_URL(url); + } + } else { + // IDNA Support: Returns a punycoded representation of "domain". + // It only converts parts of the domain name that + // have non-ASCII characters, i.e. it doesn't matter if + // you call it with a domain that already is ASCII-only. + this.hostname = toASCII(this.hostname); + + // Prevent two potential routes of hostname spoofing. + // 1. If this.hostname is empty, it must have become empty due to toASCII + // since we checked this.hostname above. + // 2. If any of forbiddenHostChars appears in this.hostname, it must have + // also gotten in due to toASCII. This is since getHostname would have + // filtered them out otherwise. + // Rather than trying to correct this by moving the non-host part into + // the pathname as we've done in getHostname, throw an exception to + // convey the severity of this issue. 
+ if (this.hostname === '' || forbiddenHostChars.test(this.hostname)) { + throw new ERR_INVALID_URL(url); + } + } + } + + const p = this.port ? ':' + this.port : ''; + const h = this.hostname || ''; + this.host = h + p; + + // strip [ and ] from the hostname + // the host field still retains them, though + if (ipv6Hostname) { + this.hostname = this.hostname.slice(1, -1); + if (rest[0] !== '/') { + rest = '/' + rest; + } + } + } + + // Now rest is set to the post-host stuff. + // Chop off any delim chars. + if (!unsafeProtocol.has(lowerProto)) { + // First, make 100% sure that any "autoEscape" chars get + // escaped, even if encodeURIComponent doesn't think they + // need to be. + rest = autoEscapeStr(rest); + } + + let questionIdx = -1; + let hashIdx = -1; + for (let i = 0; i < rest.length; ++i) { + const code = rest.charCodeAt(i); + if (code === CHAR_HASH) { + this.hash = rest.slice(i); + hashIdx = i; + break; + } else if (code === CHAR_QUESTION_MARK && questionIdx === -1) { + questionIdx = i; + } + } + + if (questionIdx !== -1) { + if (hashIdx === -1) { + this.search = rest.slice(questionIdx); + this.query = rest.slice(questionIdx + 1); + } else { + this.search = rest.slice(questionIdx, hashIdx); + this.query = rest.slice(questionIdx + 1, hashIdx); + } + if (parseQueryString) { + this.query = querystring.parse(this.query); + } + } else if (parseQueryString) { + // No query string, but parseQueryString still requested + this.search = null; + this.query = { __proto__: null }; + } + + const useQuestionIdx = + questionIdx !== -1 && (hashIdx === -1 || questionIdx < hashIdx); + const firstIdx = useQuestionIdx ? questionIdx : hashIdx; + if (firstIdx === -1) { + if (rest.length > 0) + this.pathname = rest; + } else if (firstIdx > 0) { + this.pathname = rest.slice(0, firstIdx); + } + if (slashedProtocol.has(lowerProto) && + this.hostname && !this.pathname) { + this.pathname = '/'; + } + + // To support http.request + if (this.pathname || this.search) { + const p = this.pathname || ''; + const s = this.search || ''; + this.path = p + s; + } + + // Finally, reconstruct the href based on what has been validated. + this.href = this.format(); + return this; +}; + +let warnInvalidPort = true; +function getHostname(self, rest, hostname, url) { + for (let i = 0; i < hostname.length; ++i) { + const code = hostname.charCodeAt(i); + const isValid = (code !== CHAR_FORWARD_SLASH && + code !== CHAR_BACKWARD_SLASH && + code !== CHAR_HASH && + code !== CHAR_QUESTION_MARK && + code !== CHAR_COLON); + + if (!isValid) { + // If leftover starts with :, then it represents an invalid port. + // But url.parse() is lenient about it for now. + // Issue a warning and continue. + if (warnInvalidPort && code === CHAR_COLON) { + const detail = `The URL ${url} is invalid. Future versions of Node.js will throw an error.`; + process.emitWarning(detail, 'DeprecationWarning', 'DEP0170'); + warnInvalidPort = false; + } + self.hostname = hostname.slice(0, i); + return `/${hostname.slice(i)}${rest}`; + } + } + return rest; +} + +// Escaped characters. Use empty strings to fill up unused entries. 
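+// (indexed by character code, e.g. escapedCodes[32] === '%20' for a space)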
+// Using Array is faster than Object/Map +const escapedCodes = [ + /* 0 - 9 */ '', '', '', '', '', '', '', '', '', '%09', + /* 10 - 19 */ '%0A', '', '', '%0D', '', '', '', '', '', '', + /* 20 - 29 */ '', '', '', '', '', '', '', '', '', '', + /* 30 - 39 */ '', '', '%20', '', '%22', '', '', '', '', '%27', + /* 40 - 49 */ '', '', '', '', '', '', '', '', '', '', + /* 50 - 59 */ '', '', '', '', '', '', '', '', '', '', + /* 60 - 69 */ '%3C', '', '%3E', '', '', '', '', '', '', '', + /* 70 - 79 */ '', '', '', '', '', '', '', '', '', '', + /* 80 - 89 */ '', '', '', '', '', '', '', '', '', '', + /* 90 - 99 */ '', '', '%5C', '', '%5E', '', '%60', '', '', '', + /* 100 - 109 */ '', '', '', '', '', '', '', '', '', '', + /* 110 - 119 */ '', '', '', '', '', '', '', '', '', '', + /* 120 - 125 */ '', '', '', '%7B', '%7C', '%7D', +]; + +// Automatically escape all delimiters and unwise characters from RFC 2396. +// Also escape single quotes in case of an XSS attack. +// Return the escaped string. +function autoEscapeStr(rest) { + let escaped = ''; + let lastEscapedPos = 0; + for (let i = 0; i < rest.length; ++i) { + // `escaped` contains substring up to the last escaped character. + const escapedChar = escapedCodes[rest.charCodeAt(i)]; + if (escapedChar) { + // Concat if there are ordinary characters in the middle. + if (i > lastEscapedPos) + escaped += rest.slice(lastEscapedPos, i); + escaped += escapedChar; + lastEscapedPos = i + 1; + } + } + if (lastEscapedPos === 0) // Nothing has been escaped. + return rest; + + // There are ordinary characters at the end. + if (lastEscapedPos < rest.length) + escaped += rest.slice(lastEscapedPos); + + return escaped; +} + +// Format a parsed object into a url string +function urlFormat(urlObject, options) { + // Ensure it's an object, and not a string url. + // If it's an object, this is a no-op. + // this way, you can call urlParse() on strings + // to clean up potentially wonky urls. + if (typeof urlObject === 'string') { + urlObject = urlParse(urlObject); + } else if (typeof urlObject !== 'object' || urlObject === null) { + throw new ERR_INVALID_ARG_TYPE('urlObject', + ['Object', 'string'], urlObject); + } else if (urlObject instanceof URL) { + let fragment = true; + let unicode = false; + let search = true; + let auth = true; + + if (options) { + validateObject(options, 'options'); + + if (options.fragment != null) { + fragment = Boolean(options.fragment); + } + + if (options.unicode != null) { + unicode = Boolean(options.unicode); + } + + if (options.search != null) { + search = Boolean(options.search); + } + + if (options.auth != null) { + auth = Boolean(options.auth); + } + } + + return bindingUrl.format(urlObject.href, fragment, unicode, search, auth); + } + + return Url.prototype.format.call(urlObject); +} + +// These characters do not need escaping: +// ! - . 
_ ~ +// ' ( ) * : +// digits +// alpha (uppercase) +// alpha (lowercase) +const noEscapeAuth = new Int8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x00 - 0x0F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x10 - 0x1F + 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, // 0x20 - 0x2F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, // 0x30 - 0x3F + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x40 - 0x4F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, // 0x50 - 0x5F + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x60 - 0x6F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, // 0x70 - 0x7F +]); + +Url.prototype.format = function format() { + let auth = this.auth || ''; + if (auth) { + auth = encodeStr(auth, noEscapeAuth, hexTable); + auth += '@'; + } + + let protocol = this.protocol || ''; + if (protocol && StringPrototypeCharCodeAt(protocol, protocol.length - 1) !== 58 /* : */) { + protocol += ':'; + } + + let pathname = this.pathname || ''; + let hash = this.hash || ''; + let host = ''; + let query = ''; + + if (this.host) { + host = auth + this.host; + } else if (this.hostname) { + host = auth + ( + StringPrototypeIndexOf(this.hostname, ':') !== -1 && !isIpv6Hostname(this.hostname) ? + '[' + this.hostname + ']' : + this.hostname + ); + if (this.port) { + host += ':' + this.port; + } + } + + if (this.query !== null && typeof this.query === 'object') { + query = querystring.stringify(this.query); + } + let search = this.search || (query && ('?' + query)) || ''; + + if (StringPrototypeIndexOf(pathname, '#') !== -1 || StringPrototypeIndexOf(pathname, '?') !== -1) { + let newPathname = ''; + let lastPos = 0; + const len = pathname.length; + for (let i = 0; i < len; i++) { + const code = StringPrototypeCharCodeAt(pathname, i); + if (code === CHAR_HASH || code === CHAR_QUESTION_MARK) { + if (i > lastPos) { + newPathname += StringPrototypeSlice(pathname, lastPos, i); + } + newPathname += (code === CHAR_HASH ? '%23' : '%3F'); + lastPos = i + 1; + } + } + if (lastPos < len) { + newPathname += StringPrototypeSlice(pathname, lastPos); + } + pathname = newPathname; + } + + // Only the slashedProtocols get the //. Not mailto:, xmpp:, etc. + // unless they had them to begin with. + if (this.slashes || slashedProtocol.has(protocol)) { + if (this.slashes || host) { + if (pathname && StringPrototypeCharCodeAt(pathname, 0) !== CHAR_FORWARD_SLASH) + pathname = '/' + pathname; + host = '//' + host; + } else if (protocol.length >= 4 && + StringPrototypeCharCodeAt(protocol, 0) === 102/* f */ && + StringPrototypeCharCodeAt(protocol, 1) === 105/* i */ && + StringPrototypeCharCodeAt(protocol, 2) === 108/* l */ && + StringPrototypeCharCodeAt(protocol, 3) === 101/* e */) { + host = '//'; + } + } + + // Escape '#' in search. + if (StringPrototypeIndexOf(search, '#') !== -1) { + search = StringPrototypeReplaceAll(search, '#', '%23'); + } + + if (hash && StringPrototypeCharCodeAt(hash, 0) !== CHAR_HASH) { + hash = '#' + hash; + } + if (search && StringPrototypeCharCodeAt(search, 0) !== CHAR_QUESTION_MARK) { + search = '?' 
+ search; + } + + return protocol + host + pathname + search + hash; +}; + +function urlResolve(source, relative) { + return urlParse(source, false, true).resolve(relative); +} + +Url.prototype.resolve = function resolve(relative) { + return this.resolveObject(urlParse(relative, false, true)).format(); +}; + +function urlResolveObject(source, relative) { + if (!source) return relative; + return urlParse(source, false, true).resolveObject(relative); +} + +Url.prototype.resolveObject = function resolveObject(relative) { + if (typeof relative === 'string') { + const rel = new Url(); + rel.parse(relative, false, true); + relative = rel; + } + + const result = new Url(); + ObjectAssign(result, this); + + // Hash is always overridden, no matter what. + // even href="" will remove it. + result.hash = relative.hash; + + // If the relative url is empty, then there's nothing left to do here. + if (relative.href === '') { + result.href = result.format(); + return result; + } + + // Hrefs like //foo/bar always cut to the protocol. + if (relative.slashes && !relative.protocol) { + // Take everything except the protocol from relative + const relativeWithoutProtocol = ObjectKeys(relative).reduce((acc, key) => { + if (key !== 'protocol') { + acc[key] = relative[key]; + } + return acc; + }, {}); + ObjectAssign(result, relativeWithoutProtocol); + + // urlParse appends trailing / to urls like http://www.example.com + if (slashedProtocol.has(result.protocol) && + result.hostname && !result.pathname) { + result.path = result.pathname = '/'; + } + + result.href = result.format(); + return result; + } + + if (relative.protocol && relative.protocol !== result.protocol) { + // If it's a known url protocol, then changing + // the protocol does weird things + // first, if it's not file:, then we MUST have a host, + // and if there was a path + // to begin with, then we MUST have a path. + // if it is file:, then the host is dropped, + // because that's known to be hostless. + // anything else is assumed to be absolute. 
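+ // e.g. resolving 'mailto:user@example.com' against an http:// base adopts
+ // the mailto URL wholesale instead of merging host and path.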
+ if (!slashedProtocol.has(relative.protocol)) { + ObjectAssign(result, relative); + result.href = result.format(); + return result; + } + + result.protocol = relative.protocol; + if (!relative.host && + !/^file:?$/.test(relative.protocol) && + !hostlessProtocol.has(relative.protocol)) { + const relPath = (relative.pathname || '').split('/'); + while (relPath.length && !(relative.host = relPath.shift())); + relative.host ||= ''; + relative.hostname ||= ''; + if (relPath[0] !== '') relPath.unshift(''); + if (relPath.length < 2) relPath.unshift(''); + result.pathname = relPath.join('/'); + } else { + result.pathname = relative.pathname; + } + result.search = relative.search; + result.query = relative.query; + result.host = relative.host || ''; + result.auth = relative.auth; + result.hostname = relative.hostname || relative.host; + result.port = relative.port; + // To support http.request + if (result.pathname || result.search) { + const p = result.pathname || ''; + const s = result.search || ''; + result.path = p + s; + } + result.slashes ||= relative.slashes; + result.href = result.format(); + return result; + } + + const isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'); + const isRelAbs = ( + relative.host || (relative.pathname && relative.pathname.charAt(0) === '/') + ); + let mustEndAbs = (isRelAbs || isSourceAbs || + (result.host && relative.pathname)); + const removeAllDots = mustEndAbs; + let srcPath = (result.pathname && result.pathname.split('/')) || []; + const relPath = (relative.pathname && relative.pathname.split('/')) || []; + const noLeadingSlashes = result.protocol && + !slashedProtocol.has(result.protocol); + + // If the url is a non-slashed url, then relative + // links like ../.. should be able + // to crawl up to the hostname, as well. This is strange. + // result.protocol has already been set by now. + // Later on, put the first path part into the host field. + if (noLeadingSlashes) { + result.hostname = ''; + result.port = null; + if (result.host) { + if (srcPath[0] === '') srcPath[0] = result.host; + else srcPath.unshift(result.host); + } + result.host = ''; + if (relative.protocol) { + relative.hostname = null; + relative.port = null; + result.auth = null; + if (relative.host) { + if (relPath[0] === '') relPath[0] = relative.host; + else relPath.unshift(relative.host); + } + relative.host = null; + } + mustEndAbs &&= (relPath[0] === '' || srcPath[0] === ''); + } + + if (isRelAbs) { + // it's absolute. + if (relative.host || relative.host === '') { + if (result.host !== relative.host) result.auth = null; + result.host = relative.host; + result.port = relative.port; + } + if (relative.hostname || relative.hostname === '') { + if (result.hostname !== relative.hostname) result.auth = null; + result.hostname = relative.hostname; + } + result.search = relative.search; + result.query = relative.query; + srcPath = relPath; + // Fall through to the dot-handling below. + } else if (relPath.length) { + // it's relative + // throw away the existing file, and take the new path instead. + srcPath ||= []; + srcPath.pop(); + srcPath = srcPath.concat(relPath); + result.search = relative.search; + result.query = relative.query; + } else if (relative.search !== null && relative.search !== undefined) { + // Just pull out the search. + // like href='?foo'. + // Put this after the other two cases because it simplifies the booleans + if (noLeadingSlashes) { + result.hostname = result.host = srcPath.shift(); + // Occasionally the auth can get stuck only in host. 
+ // This especially happens in cases like + // url.resolveObject('mailto:local1@domain1', 'local2@domain2') + const authInHost = + result.host && result.host.indexOf('@') > 0 && result.host.split('@'); + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + result.search = relative.search; + result.query = relative.query; + // To support http.request + if (result.pathname !== null || result.search !== null) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.href = result.format(); + return result; + } + + if (!srcPath.length) { + // No path at all. All other things were already handled above. + result.pathname = null; + // To support http.request + if (result.search) { + result.path = '/' + result.search; + } else { + result.path = null; + } + result.href = result.format(); + return result; + } + + // If a url ENDs in . or .., then it must get a trailing slash. + // however, if it ends in anything else non-slashy, + // then it must NOT get a trailing slash. + let last = srcPath[srcPath.length - 1]; + const hasTrailingSlash = ( + ((result.host || relative.host || srcPath.length > 1) && + (last === '.' || last === '..')) || last === ''); + + // Strip single dots, resolve double dots to parent dir + // if the path tries to go above the root, `up` ends up > 0 + let up = 0; + for (let i = srcPath.length - 1; i >= 0; i--) { + last = srcPath[i]; + if (last === '.') { + spliceOne(srcPath, i); + } else if (last === '..') { + spliceOne(srcPath, i); + up++; + } else if (up) { + spliceOne(srcPath, i); + up--; + } + } + + // If the path is allowed to go above the root, restore leading ..s + if (!mustEndAbs && !removeAllDots) { + while (up--) { + srcPath.unshift('..'); + } + } + + if (mustEndAbs && srcPath[0] !== '' && + (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { + srcPath.unshift(''); + } + + if (hasTrailingSlash && StringPrototypeAt(ArrayPrototypeJoin(srcPath, '/'), -1) !== '/') { + srcPath.push(''); + } + + const isAbsolute = srcPath[0] === '' || + (srcPath[0] && srcPath[0].charAt(0) === '/'); + + // put the host back + if (noLeadingSlashes) { + result.hostname = + result.host = isAbsolute ? '' : srcPath.length ? srcPath.shift() : ''; + // Occasionally the auth can get stuck only in host. + // This especially happens in cases like + // url.resolveObject('mailto:local1@domain1', 'local2@domain2') + const authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + + mustEndAbs ||= (result.host && srcPath.length); + + if (mustEndAbs && !isAbsolute) { + srcPath.unshift(''); + } + + if (!srcPath.length) { + result.pathname = null; + result.path = null; + } else { + result.pathname = srcPath.join('/'); + } + + // To support request.http + if (result.pathname !== null || result.search !== null) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? 
result.search : ''); + } + result.auth = relative.auth || result.auth; + result.slashes ||= relative.slashes; + result.href = result.format(); + return result; +}; + +Url.prototype.parseHost = function parseHost() { + let host = this.host; + let port = portPattern.exec(host); + if (port) { + port = port[0]; + if (port !== ':') { + this.port = port.slice(1); + } + host = host.slice(0, host.length - port.length); + } + if (host) this.hostname = host; +}; + +// When used internally, we are not obligated to associate TypeError with +// this function, so non-strings can be rejected by underlying implementation. +// Public API has to validate input and throw appropriate error. +function pathToFileURL(path, options) { + validateString(path, 'path'); + + return _pathToFileURL(path, options); +} + +module.exports = { + // Original API + Url, + parse: urlParse, + resolve: urlResolve, + resolveObject: urlResolveObject, + format: urlFormat, + + // WHATWG API + URL, + URLPattern, + URLSearchParams, + domainToASCII, + domainToUnicode, + + // Utilities + pathToFileURL, + fileURLToPath, + fileURLToPathBuffer, + urlToHttpOptions, +}; \ No newline at end of file diff --git a/.codesandbox/node/util.js b/.codesandbox/node/util.js new file mode 100644 index 0000000000..d458b0a23f --- /dev/null +++ b/.codesandbox/node/util.js @@ -0,0 +1,521 @@ +'use strict'; + +const { + ArrayIsArray, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeReduce, + Error, + ErrorCaptureStackTrace, + FunctionPrototypeBind, + NumberIsSafeInteger, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptors, + ObjectKeys, + ObjectSetPrototypeOf, + ObjectValues, + ReflectApply, + RegExp, + RegExpPrototypeSymbolReplace, + StringPrototypeToWellFormed, +} = primordials; + +const { + ErrnoException, + ExceptionWithHostPort, + codes: { + ERR_FALSY_VALUE_REJECTION, + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + }, + isErrorStackTraceLimitWritable, +} = require('internal/errors'); +const { + format, + formatWithOptions, + inspect, + stripVTControlCharacters, +} = require('internal/util/inspect'); +const { debuglog } = require('internal/util/debuglog'); +const { + validateBoolean, + validateFunction, + validateNumber, + validateString, + validateOneOf, + validateObject, +} = require('internal/validators'); +const { + isReadableStream, + isWritableStream, + isNodeStream, +} = require('internal/streams/utils'); +const types = require('internal/util/types'); + +let utilColors; +function lazyUtilColors() { + utilColors ??= require('internal/util/colors'); + return utilColors; +} +const { getOptionValue } = require('internal/options'); + +const binding = internalBinding('util'); + +const { + deprecate, + getLazy, + getSystemErrorMap, + getSystemErrorName: internalErrorName, + getSystemErrorMessage: internalErrorMessage, + promisify, + defineLazyProperties, +} = require('internal/util'); + +let abortController; + +function lazyAbortController() { + abortController ??= require('internal/abort_controller'); + return abortController; +} + +let internalDeepEqual; + +/** + * @param {string} [code] + * @returns {string} + */ +function escapeStyleCode(code) { + if (code === undefined) return ''; + return `\u001b[${code}m`; +} + +/** + * @param {string | string[]} format + * @param {string} text + * @param {object} [options] + * @param {boolean} [options.validateStream] - Whether to validate the stream. + * @param {Stream} [options.stream] - The stream used for validation. 
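+ * @example
+ * // styleText(['bold', 'green'], 'ok') returns 'ok' wrapped in the
+ * // corresponding ANSI escape codes when the stream supports color.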
+ * @returns {string} + */ +function styleText(format, text, { validateStream = true, stream = process.stdout } = {}) { + validateString(text, 'text'); + validateBoolean(validateStream, 'options.validateStream'); + + let skipColorize; + if (validateStream) { + if ( + !isReadableStream(stream) && + !isWritableStream(stream) && + !isNodeStream(stream) + ) { + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream); + } + + // If the stream is falsy or should not be colorized, set skipColorize to true + skipColorize = !lazyUtilColors().shouldColorize(stream); + } + + // If the format is not an array, convert it to an array + const formatArray = ArrayIsArray(format) ? format : [format]; + + const codes = []; + for (const key of formatArray) { + if (key === 'none') continue; + const formatCodes = inspect.colors[key]; + // If the format is not a valid style, throw an error + if (formatCodes == null) { + validateOneOf(key, 'format', ObjectKeys(inspect.colors)); + } + if (skipColorize) continue; + ArrayPrototypePush(codes, formatCodes); + } + + if (skipColorize) { + return text; + } + + // Build opening codes + let openCodes = ''; + for (let i = 0; i < codes.length; i++) { + openCodes += escapeStyleCode(codes[i][0]); + } + + // Process the text to handle nested styles + let processedText; + if (codes.length > 0) { + processedText = ArrayPrototypeReduce( + codes, + (text, code) => RegExpPrototypeSymbolReplace( + // Find the reset code + new RegExp(`\\u001b\\[${code[1]}m`, 'g'), + text, + (match, offset) => { + // Check if there's more content after this reset + if (offset + match.length < text.length) { + if ( + code[0] === inspect.colors.dim[0] || + code[0] === inspect.colors.bold[0] + ) { + // Dim and bold are not mutually exclusive, so we need to reapply + return `${match}${escapeStyleCode(code[0])}`; + } + return escapeStyleCode(code[0]); + } + return match; + }, + ), + text, + ); + } else { + processedText = text; + } + + // Build closing codes in reverse order + let closeCodes = ''; + for (let i = codes.length - 1; i >= 0; i--) { + closeCodes += escapeStyleCode(codes[i][1]); + } + + return `${openCodes}${processedText}${closeCodes}`; +} + +/** + * Inherit the prototype methods from one constructor into another. + * + * The Function.prototype.inherits from lang.js rewritten as a standalone + * function (not on Function.prototype). NOTE: If this file is to be loaded + * during bootstrapping this function needs to be rewritten using some native + * functions as prototype setup using normal JavaScript does not work as + * expected during bootstrapping (see mirror.js in r114903). + * @param {Function} ctor Constructor function which needs to inherit the + * prototype. + * @param {Function} superCtor Constructor function to inherit prototype from. + * @throws {TypeError} Will error if either constructor is null, or if + * the super constructor lacks a prototype. 
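+ * @example
+ * // function MyStream() { EventEmitter.call(this); }
+ * // inherits(MyStream, EventEmitter);
+ * // MyStream.prototype now delegates to EventEmitter.prototype.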
+ */ +function inherits(ctor, superCtor) { + + if (ctor === undefined || ctor === null) + throw new ERR_INVALID_ARG_TYPE('ctor', 'Function', ctor); + + if (superCtor === undefined || superCtor === null) + throw new ERR_INVALID_ARG_TYPE('superCtor', 'Function', superCtor); + + if (superCtor.prototype === undefined) { + throw new ERR_INVALID_ARG_TYPE('superCtor.prototype', + 'Object', superCtor.prototype); + } + ObjectDefineProperty(ctor, 'super_', { + __proto__: null, + value: superCtor, + writable: true, + configurable: true, + }); + ObjectSetPrototypeOf(ctor.prototype, superCtor.prototype); +} + +/** + * @deprecated since v6.0.0 + * @template T + * @template S + * @param {T} target + * @param {S} source + * @returns {(T & S) | null} + */ +function _extend(target, source) { + // Don't do anything if source isn't an object + if (source === null || typeof source !== 'object') return target; + + const keys = ObjectKeys(source); + let i = keys.length; + while (i--) { + target[keys[i]] = source[keys[i]]; + } + return target; +} + +const callbackifyOnRejected = (reason, cb) => { + // `!reason` guard inspired by bluebird (Ref: https://goo.gl/t5IS6M). + // Because `null` is a special error value in callbacks which means "no error + // occurred", we error-wrap so the callback consumer can distinguish between + // "the promise rejected with null" or "the promise fulfilled with undefined". + if (!reason) { + reason = new ERR_FALSY_VALUE_REJECTION.HideStackFramesError(reason); + ErrorCaptureStackTrace(reason, callbackifyOnRejected); + } + return cb(reason); +}; + +/** + * Converts a Promise-returning function to callback style + * @param {Function} original + * @returns {Function} + */ +function callbackify(original) { + validateFunction(original, 'original'); + + // We DO NOT return the promise as it gives the user a false sense that + // the promise is actually somehow related to the callback's execution + // and that the callback throwing will reject the promise. + function callbackified(...args) { + const maybeCb = ArrayPrototypePop(args); + validateFunction(maybeCb, 'last argument'); + const cb = FunctionPrototypeBind(maybeCb, this); + // In true node style we process the callback on `nextTick` with all the + // implications (stack, `uncaughtException`, `async_hooks`) + ReflectApply(original, this, args) + .then((ret) => process.nextTick(cb, null, ret), + (rej) => process.nextTick(callbackifyOnRejected, rej, cb)); + } + + const descriptors = ObjectGetOwnPropertyDescriptors(original); + // It is possible to manipulate a functions `length` or `name` property. This + // guards against the manipulation. + if (typeof descriptors.length.value === 'number') { + descriptors.length.value++; + } + if (typeof descriptors.name.value === 'string') { + descriptors.name.value += 'Callbackified'; + } + const propertiesValues = ObjectValues(descriptors); + for (let i = 0; i < propertiesValues.length; i++) { + // We want to use null-prototype objects to not rely on globally mutable + // %Object.prototype%. 
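+ // (each descriptor copied from `original` gets a null prototype here
+ // before being re-applied to the callbackified wrapper below)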
+ ObjectSetPrototypeOf(propertiesValues[i], null); + } + ObjectDefineProperties(callbackified, descriptors); + return callbackified; +} + +/** + * @param {number} err + * @returns {string} + */ +function getSystemErrorMessage(err) { + validateNumber(err, 'err'); + if (err >= 0 || !NumberIsSafeInteger(err)) { + throw new ERR_OUT_OF_RANGE('err', 'a negative integer', err); + } + return internalErrorMessage(err); +} + +/** + * @param {number} err + * @returns {string} + */ +function getSystemErrorName(err) { + validateNumber(err, 'err'); + if (err >= 0 || !NumberIsSafeInteger(err)) { + throw new ERR_OUT_OF_RANGE('err', 'a negative integer', err); + } + return internalErrorName(err); +} + +function _errnoException(...args) { + if (isErrorStackTraceLimitWritable()) { + const limit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + const e = new ErrnoException(...args); + Error.stackTraceLimit = limit; + ErrorCaptureStackTrace(e, _exceptionWithHostPort); + return e; + } + return new ErrnoException(...args); +} + +function _exceptionWithHostPort(...args) { + if (isErrorStackTraceLimitWritable()) { + const limit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + const e = new ExceptionWithHostPort(...args); + Error.stackTraceLimit = limit; + ErrorCaptureStackTrace(e, _exceptionWithHostPort); + return e; + } + return new ExceptionWithHostPort(...args); +} + +/** + * Parses the content of a `.env` file. + * @param {string} content + * @returns {Record} + */ +function parseEnv(content) { + validateString(content, 'content'); + return binding.parseEnv(content); +} + +const lazySourceMap = getLazy(() => require('internal/source_map/source_map_cache')); + +/** + * @typedef {object} CallSite // The call site + * @property {string} scriptName // The name of the resource that contains the + * script for the function for this StackFrame + * @property {string} functionName // The name of the function associated with this stack frame + * @property {number} lineNumber // The number, 1-based, of the line for the associate function call + * @property {number} columnNumber // The 1-based column offset on the line for the associated function call + */ + +/** + * @param {CallSite} callSite // The call site object to reconstruct from source map + * @returns {CallSite | undefined} // The reconstructed call site object + */ +function reconstructCallSite(callSite) { + const { scriptName, lineNumber, columnNumber } = callSite; + const sourceMap = lazySourceMap().findSourceMap(scriptName); + if (!sourceMap) return; + const entry = sourceMap.findEntry(lineNumber - 1, columnNumber - 1); + if (!entry?.originalSource) return; + return { + __proto__: null, + // If the name is not found, it is an empty string to match the behavior of `util.getCallSite()` + functionName: entry.name ?? '', + scriptName: entry.originalSource, + lineNumber: entry.originalLine + 1, + column: entry.originalColumn + 1, + columnNumber: entry.originalColumn + 1, + }; +} + +/** + * + * The call site array to map + * @param {CallSite[]} callSites + * Array of objects with the reconstructed call site + * @returns {CallSite[]} + */ +function mapCallSite(callSites) { + const result = []; + for (let i = 0; i < callSites.length; ++i) { + const callSite = callSites[i]; + const found = reconstructCallSite(callSite); + ArrayPrototypePush(result, found ?? 
callSite); + } + return result; +} + +/** + * @typedef {object} CallSiteOptions // The call site options + * @property {boolean} sourceMap // Enable source map support + */ + +/** + * Returns the callSite + * @param {number} frameCount + * @param {CallSiteOptions} options + * @returns {CallSite[]} + */ +function getCallSites(frameCount = 10, options) { + // If options is not provided check if frameCount is an object + if (options === undefined) { + if (typeof frameCount === 'object') { + // If frameCount is an object, it is the options object + options = frameCount; + validateObject(options, 'options'); + if (options.sourceMap !== undefined) { + validateBoolean(options.sourceMap, 'options.sourceMap'); + } + frameCount = 10; + } else { + // If options is not provided, set it to an empty object + options = {}; + }; + } else { + // If options is provided, validate it + validateObject(options, 'options'); + if (options.sourceMap !== undefined) { + validateBoolean(options.sourceMap, 'options.sourceMap'); + } + } + + // Using kDefaultMaxCallStackSizeToCapture as reference + validateNumber(frameCount, 'frameCount', 1, 200); + // If options.sourceMaps is true or if sourceMaps are enabled but the option.sourceMaps is not set explictly to false + if (options.sourceMap === true || (getOptionValue('--enable-source-maps') && options.sourceMap !== false)) { + return mapCallSite(binding.getCallSites(frameCount)); + } + return binding.getCallSites(frameCount); +}; + +// Keep the `exports =` so that various functions can still be monkeypatched +module.exports = { + _errnoException, + _exceptionWithHostPort, + _extend: deprecate(_extend, + 'The `util._extend` API is deprecated. Please use Object.assign() instead.', + 'DEP0060'), + callbackify, + debug: debuglog, + debuglog, + deprecate, + format, + styleText, + formatWithOptions, + // Deprecated getCallSite. + // This API can be removed in next semver-minor release. + getCallSite: deprecate(getCallSites, + 'The `util.getCallSite` API has been renamed to `util.getCallSites()`.', + 'ExperimentalWarning'), + getCallSites, + getSystemErrorMap, + getSystemErrorName, + getSystemErrorMessage, + inherits, + inspect, + isArray: deprecate(ArrayIsArray, + 'The `util.isArray` API is deprecated. 
Please use `Array.isArray()` instead.', + 'DEP0044'), + isDeepStrictEqual(a, b, skipPrototype) { + if (internalDeepEqual === undefined) { + internalDeepEqual = require('internal/util/comparisons').isDeepStrictEqual; + } + return internalDeepEqual(a, b, skipPrototype); + }, + promisify, + stripVTControlCharacters, + toUSVString(input) { + return StringPrototypeToWellFormed(`${input}`); + }, + get transferableAbortSignal() { + return lazyAbortController().transferableAbortSignal; + }, + get transferableAbortController() { + return lazyAbortController().transferableAbortController; + }, + get aborted() { + return lazyAbortController().aborted; + }, + types, + parseEnv, +}; + +defineLazyProperties( + module.exports, + 'internal/util/parse_args/parse_args', + ['parseArgs'], +); + +defineLazyProperties( + module.exports, + 'internal/encoding', + ['TextDecoder', 'TextEncoder'], +); + +defineLazyProperties( + module.exports, + 'internal/mime', + ['MIMEType', 'MIMEParams'], +); + +defineLazyProperties( + module.exports, + 'internal/util/diff', + ['diff'], +); + +defineLazyProperties( + module.exports, + 'internal/util/trace_sigint', + ['setTraceSigInt'], +); \ No newline at end of file diff --git a/.codesandbox/node/v8.js b/.codesandbox/node/v8.js new file mode 100644 index 0000000000..a28391e83b --- /dev/null +++ b/.codesandbox/node/v8.js @@ -0,0 +1,467 @@ +'use strict'; + +const { + Array, + BigInt64Array, + BigUint64Array, + DataView, + Error, + Float32Array, + Float64Array, + Int16Array, + Int32Array, + Int8Array, + JSONParse, + ObjectPrototypeToString, + Uint16Array, + Uint32Array, + Uint8Array, + Uint8ClampedArray, + globalThis: { + Float16Array, + }, +} = primordials; + +const { Buffer } = require('buffer'); +const { + validateString, + validateUint32, + validateOneOf, +} = require('internal/validators'); +const { + Serializer, + Deserializer, +} = internalBinding('serdes'); +const { + namespace: startupSnapshot, +} = require('internal/v8/startup_snapshot'); + +let profiler = {}; +if (internalBinding('config').hasInspector) { + profiler = internalBinding('profiler'); +} + +const assert = require('internal/assert'); +const { inspect } = require('internal/util/inspect'); +const { FastBuffer } = require('internal/buffer'); +const { getValidatedPath } = require('internal/fs/utils'); +const { + createHeapSnapshotStream, + triggerHeapSnapshot, +} = internalBinding('heap_utils'); +const { + HeapSnapshotStream, + getHeapSnapshotOptions, + queryObjects, +} = require('internal/heap_utils'); +const promiseHooks = require('internal/promise_hooks'); +const { getOptionValue } = require('internal/options'); + +/** + * Generates a snapshot of the current V8 heap + * and writes it to a JSON file. + * @param {string} [filename] + * @param {{ + * exposeInternals?: boolean, + * exposeNumericValues?: boolean + * }} [options] + * @returns {string} + */ +function writeHeapSnapshot(filename, options) { + if (filename !== undefined) { + filename = getValidatedPath(filename); + } + const optionArray = getHeapSnapshotOptions(options); + return triggerHeapSnapshot(filename, optionArray); +} + +/** + * Generates a snapshot of the current V8 heap + * and returns a Readable Stream. 
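+ * e.g. `getHeapSnapshot().pipe(fs.createWriteStream('out.heapsnapshot'))`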
+ * @param {{ + * exposeInternals?: boolean, + * exposeNumericValues?: boolean + * }} [options] + * @returns {import('./stream.js').Readable} + */ +function getHeapSnapshot(options) { + const optionArray = getHeapSnapshotOptions(options); + const handle = createHeapSnapshotStream(optionArray); + assert(handle); + return new HeapSnapshotStream(handle); +} + +// We need to get the buffer from the binding at the callsite since +// it's re-initialized after deserialization. +const binding = internalBinding('v8'); + +const { + cachedDataVersionTag, + setFlagsFromString: _setFlagsFromString, + isStringOneByteRepresentation: _isStringOneByteRepresentation, + updateHeapStatisticsBuffer, + updateHeapSpaceStatisticsBuffer, + updateHeapCodeStatisticsBuffer, + setHeapSnapshotNearHeapLimit: _setHeapSnapshotNearHeapLimit, + + // Properties for heap statistics buffer extraction. + kTotalHeapSizeIndex, + kTotalHeapSizeExecutableIndex, + kTotalPhysicalSizeIndex, + kTotalAvailableSize, + kUsedHeapSizeIndex, + kHeapSizeLimitIndex, + kDoesZapGarbageIndex, + kMallocedMemoryIndex, + kPeakMallocedMemoryIndex, + kNumberOfNativeContextsIndex, + kNumberOfDetachedContextsIndex, + kTotalGlobalHandlesSizeIndex, + kUsedGlobalHandlesSizeIndex, + kExternalMemoryIndex, + + // Properties for heap spaces statistics buffer extraction. + kHeapSpaces, + kSpaceSizeIndex, + kSpaceUsedSizeIndex, + kSpaceAvailableSizeIndex, + kPhysicalSpaceSizeIndex, + + // Properties for heap code statistics buffer extraction. + kCodeAndMetadataSizeIndex, + kBytecodeAndMetadataSizeIndex, + kExternalScriptSourceSizeIndex, + kCPUProfilerMetaDataSizeIndex, + + heapStatisticsBuffer, + heapCodeStatisticsBuffer, + heapSpaceStatisticsBuffer, + getCppHeapStatistics: _getCppHeapStatistics, + detailLevel, +} = binding; + +const kNumberOfHeapSpaces = kHeapSpaces.length; + +/** + * Sets V8 command-line flags. + * @param {string} flags + * @returns {void} + */ +function setFlagsFromString(flags) { + validateString(flags, 'flags'); + _setFlagsFromString(flags); +} + +/** + * Return whether this string uses one byte as underlying representation or not. + * @param {string} content + * @returns {boolean} + */ +function isStringOneByteRepresentation(content) { + validateString(content, 'content'); + return _isStringOneByteRepresentation(content); +} + + +/** + * Gets the current V8 heap statistics. 
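+ * Values are read from a shared statistics buffer refreshed on each call.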
+ * @returns {{ + * total_heap_size: number; + * total_heap_size_executable: number; + * total_physical_size: number; + * total_available_size: number; + * used_heap_size: number; + * heap_size_limit: number; + * malloced_memory: number; + * peak_malloced_memory: number; + * does_zap_garbage: number; + * number_of_native_contexts: number; + * number_of_detached_contexts: number; + * }} + */ +function getHeapStatistics() { + const buffer = heapStatisticsBuffer; + + updateHeapStatisticsBuffer(); + + return { + total_heap_size: buffer[kTotalHeapSizeIndex], + total_heap_size_executable: buffer[kTotalHeapSizeExecutableIndex], + total_physical_size: buffer[kTotalPhysicalSizeIndex], + total_available_size: buffer[kTotalAvailableSize], + used_heap_size: buffer[kUsedHeapSizeIndex], + heap_size_limit: buffer[kHeapSizeLimitIndex], + malloced_memory: buffer[kMallocedMemoryIndex], + peak_malloced_memory: buffer[kPeakMallocedMemoryIndex], + does_zap_garbage: buffer[kDoesZapGarbageIndex], + number_of_native_contexts: buffer[kNumberOfNativeContextsIndex], + number_of_detached_contexts: buffer[kNumberOfDetachedContextsIndex], + total_global_handles_size: buffer[kTotalGlobalHandlesSizeIndex], + used_global_handles_size: buffer[kUsedGlobalHandlesSizeIndex], + external_memory: buffer[kExternalMemoryIndex], + }; +} + +/** + * Gets the current V8 heap space statistics. + * @returns {{ + * space_name: string; + * space_size: number; + * space_used_size: number; + * space_available_size: number; + * physical_space_size: number; + * }[]} + */ +function getHeapSpaceStatistics() { + const heapSpaceStatistics = new Array(kNumberOfHeapSpaces); + const buffer = heapSpaceStatisticsBuffer; + + for (let i = 0; i < kNumberOfHeapSpaces; i++) { + updateHeapSpaceStatisticsBuffer(i); + heapSpaceStatistics[i] = { + space_name: kHeapSpaces[i], + space_size: buffer[kSpaceSizeIndex], + space_used_size: buffer[kSpaceUsedSizeIndex], + space_available_size: buffer[kSpaceAvailableSizeIndex], + physical_space_size: buffer[kPhysicalSpaceSizeIndex], + }; + } + + return heapSpaceStatistics; +} + +/** + * Gets the current V8 heap code statistics. 
+ * @returns {{ + * code_and_metadata_size: number; + * bytecode_and_metadata_size: number; + * external_script_source_size: number; + * cpu_profiler_metadata_size: number; + * }} + */ +function getHeapCodeStatistics() { + const buffer = heapCodeStatisticsBuffer; + + updateHeapCodeStatisticsBuffer(); + return { + code_and_metadata_size: buffer[kCodeAndMetadataSizeIndex], + bytecode_and_metadata_size: buffer[kBytecodeAndMetadataSizeIndex], + external_script_source_size: buffer[kExternalScriptSourceSizeIndex], + cpu_profiler_metadata_size: buffer[kCPUProfilerMetaDataSizeIndex], + }; +} + +let heapSnapshotNearHeapLimitCallbackAdded = false; +function setHeapSnapshotNearHeapLimit(limit) { + validateUint32(limit, 'limit', true); + if (heapSnapshotNearHeapLimitCallbackAdded || + getOptionValue('--heapsnapshot-near-heap-limit') > 0 + ) { + return; + } + heapSnapshotNearHeapLimitCallbackAdded = true; + _setHeapSnapshotNearHeapLimit(limit); +} + +const detailLevelDict = { + __proto__: null, + detailed: detailLevel.DETAILED, + brief: detailLevel.BRIEF, +}; + +function getCppHeapStatistics(type = 'detailed') { + validateOneOf(type, 'type', ['brief', 'detailed']); + const result = _getCppHeapStatistics(detailLevelDict[type]); + result.detail_level = type; + return result; +} + +/* V8 serialization API */ + +/* JS methods for the base objects */ +Serializer.prototype._getDataCloneError = Error; + +/** + * Reads raw bytes from the deserializer's internal buffer. + * @param {number} length + * @returns {Buffer} + */ +Deserializer.prototype.readRawBytes = function readRawBytes(length) { + const offset = this._readRawBytes(length); + // `this.buffer` can be a Buffer or a plain Uint8Array, so just calling + // `.slice()` doesn't work. + return new FastBuffer(this.buffer.buffer, + this.buffer.byteOffset + offset, + length); +}; + +function arrayBufferViewTypeToIndex(abView) { + const type = ObjectPrototypeToString(abView); + if (type === '[object Int8Array]') return 0; + if (type === '[object Uint8Array]') return 1; + if (type === '[object Uint8ClampedArray]') return 2; + if (type === '[object Int16Array]') return 3; + if (type === '[object Uint16Array]') return 4; + if (type === '[object Int32Array]') return 5; + if (type === '[object Uint32Array]') return 6; + if (type === '[object Float32Array]') return 7; + if (type === '[object Float64Array]') return 8; + if (type === '[object DataView]') return 9; + // Index 10 is FastBuffer. + if (type === '[object BigInt64Array]') return 11; + if (type === '[object BigUint64Array]') return 12; + if (type === '[object Float16Array]') return 13; + return -1; +} + +function arrayBufferViewIndexToType(index) { + if (index === 0) return Int8Array; + if (index === 1) return Uint8Array; + if (index === 2) return Uint8ClampedArray; + if (index === 3) return Int16Array; + if (index === 4) return Uint16Array; + if (index === 5) return Int32Array; + if (index === 6) return Uint32Array; + if (index === 7) return Float32Array; + if (index === 8) return Float64Array; + if (index === 9) return DataView; + if (index === 10) return FastBuffer; + if (index === 11) return BigInt64Array; + if (index === 12) return BigUint64Array; + if (index === 13) return Float16Array; + return undefined; +} + +class DefaultSerializer extends Serializer { + constructor() { + super(); + + this._setTreatArrayBufferViewsAsHostObjects(true); + } + + /** + * Used to write some kind of host object, i.e. an + * object that is created by native C++ bindings. 
+ * @param {object} abView + * @returns {void} + */ + _writeHostObject(abView) { + // Keep track of how to handle different ArrayBufferViews. The default + // Serializer for Node does not use the V8 methods for serializing those + // objects because Node's `Buffer` objects use pooled allocation in many + // cases, and their underlying `ArrayBuffer`s would show up in the + // serialization. Because a) those may contain sensitive data and the user + // may not be aware of that and b) they are often much larger than the + // `Buffer` itself, custom serialization is applied. + let i = 10; // FastBuffer + if (abView.constructor !== Buffer) { + i = arrayBufferViewTypeToIndex(abView); + if (i === -1) { + throw new this._getDataCloneError( + `Unserializable host object: ${inspect(abView)}`); + } + } + this.writeUint32(i); + this.writeUint32(abView.byteLength); + this.writeRawBytes(new Uint8Array(abView.buffer, + abView.byteOffset, + abView.byteLength)); + } +} + +class DefaultDeserializer extends Deserializer { + /** + * Used to read some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @returns {any} + */ + _readHostObject() { + const typeIndex = this.readUint32(); + const ctor = arrayBufferViewIndexToType(typeIndex); + const byteLength = this.readUint32(); + const byteOffset = this._readRawBytes(byteLength); + const BYTES_PER_ELEMENT = ctor.BYTES_PER_ELEMENT || 1; + + const offset = this.buffer.byteOffset + byteOffset; + if (offset % BYTES_PER_ELEMENT === 0) { + return new ctor(this.buffer.buffer, + offset, + byteLength / BYTES_PER_ELEMENT); + } + // Copy to an aligned buffer first. + const buffer_copy = Buffer.allocUnsafe(byteLength); + buffer_copy.set(new Uint8Array(this.buffer.buffer, this.buffer.byteOffset + byteOffset, byteLength)); + return new ctor(buffer_copy.buffer, + buffer_copy.byteOffset, + byteLength / BYTES_PER_ELEMENT); + } +} + +/** + * Uses a `DefaultSerializer` to serialize `value` + * into a buffer. + * @param {any} value + * @returns {Buffer} + */ +function serialize(value) { + const ser = new DefaultSerializer(); + ser.writeHeader(); + ser.writeValue(value); + return ser.releaseBuffer(); +} + +/** + * Uses a `DefaultDeserializer` with default options + * to read a JavaScript value from a buffer. 
+ * @param {Buffer | TypedArray | DataView} buffer + * @returns {any} + */ +function deserialize(buffer) { + const der = new DefaultDeserializer(buffer); + der.readHeader(); + return der.readValue(); +} + +class GCProfiler { + #profiler = null; + + start() { + if (!this.#profiler) { + this.#profiler = new binding.GCProfiler(); + this.#profiler.start(); + } + } + + stop() { + if (this.#profiler) { + const data = this.#profiler.stop(); + this.#profiler = null; + return JSONParse(data); + } + } +} + +module.exports = { + cachedDataVersionTag, + getHeapSnapshot, + getHeapStatistics, + getHeapSpaceStatistics, + getHeapCodeStatistics, + getCppHeapStatistics, + setFlagsFromString, + Serializer, + Deserializer, + DefaultSerializer, + DefaultDeserializer, + deserialize, + takeCoverage: profiler.takeCoverage, + stopCoverage: profiler.stopCoverage, + serialize, + writeHeapSnapshot, + promiseHooks, + queryObjects, + startupSnapshot, + setHeapSnapshotNearHeapLimit, + GCProfiler, + isStringOneByteRepresentation, +}; \ No newline at end of file diff --git a/.codesandbox/node/vm.js b/.codesandbox/node/vm.js new file mode 100644 index 0000000000..feb9c0a55f --- /dev/null +++ b/.codesandbox/node/vm.js @@ -0,0 +1,398 @@ +'use strict'; + +const { + ArrayPrototypeForEach, + ObjectFreeze, + PromiseReject, + ReflectApply, + Symbol, +} = primordials; + +const { + ContextifyScript, + makeContext, + constants, + measureMemory: _measureMemory, +} = internalBinding('contextify'); +const { + ERR_CONTEXT_NOT_INITIALIZED, + ERR_INVALID_ARG_TYPE, +} = require('internal/errors').codes; +const { + validateArray, + validateBoolean, + validateBuffer, + validateInt32, + validateOneOf, + validateObject, + validateString, + validateStringArray, + validateUint32, + kValidateObjectAllowArray, + kValidateObjectAllowNullable, +} = require('internal/validators'); +const { + emitExperimentalWarning, + kEmptyObject, + kVmBreakFirstLineSymbol, +} = require('internal/util'); +const { + getHostDefinedOptionId, + internalCompileFunction, + isContext: _isContext, + registerImportModuleDynamically, +} = require('internal/vm'); +const { + vm_dynamic_import_main_context_default, + vm_context_no_contextify, +} = internalBinding('symbols'); +const kParsingContext = Symbol('script parsing context'); + +/** + * Check if object is a context object created by vm.createContext(). + * @throws {TypeError} If object is not an object in the first place, throws TypeError. + * @param {object} object Object to check. 
+ * @returns {boolean} + */ +function isContext(object) { + validateObject(object, 'object', kValidateObjectAllowArray); + + return _isContext(object); +} + +class Script extends ContextifyScript { + constructor(code, options = kEmptyObject) { + code = `${code}`; + if (typeof options === 'string') { + options = { filename: options }; + } else { + validateObject(options, 'options'); + } + + const { + filename = 'evalmachine.', + lineOffset = 0, + columnOffset = 0, + cachedData, + produceCachedData = false, + importModuleDynamically, + [kParsingContext]: parsingContext, + } = options; + + validateString(filename, 'options.filename'); + validateInt32(lineOffset, 'options.lineOffset'); + validateInt32(columnOffset, 'options.columnOffset'); + if (cachedData !== undefined) { + validateBuffer(cachedData, 'options.cachedData'); + } + validateBoolean(produceCachedData, 'options.produceCachedData'); + + const hostDefinedOptionId = + getHostDefinedOptionId(importModuleDynamically, filename); + // Calling `ReThrow()` on a native TryCatch does not generate a new + // abort-on-uncaught-exception check. A dummy try/catch in JS land + // protects against that. + try { // eslint-disable-line no-useless-catch + super(code, + filename, + lineOffset, + columnOffset, + cachedData, + produceCachedData, + parsingContext, + hostDefinedOptionId); + } catch (e) { + throw e; /* node-do-not-add-exception-line */ + } + + registerImportModuleDynamically(this, importModuleDynamically); + } + + runInThisContext(options) { + const { breakOnSigint, args } = getRunInContextArgs(null, options); + if (breakOnSigint && process.listenerCount('SIGINT') > 0) { + return sigintHandlersWrap(super.runInContext, this, args); + } + return ReflectApply(super.runInContext, this, args); + } + + runInContext(contextifiedObject, options) { + validateContext(contextifiedObject); + const { breakOnSigint, args } = getRunInContextArgs( + contextifiedObject, + options, + ); + if (breakOnSigint && process.listenerCount('SIGINT') > 0) { + return sigintHandlersWrap(super.runInContext, this, args); + } + return ReflectApply(super.runInContext, this, args); + } + + runInNewContext(contextObject, options) { + const context = createContext(contextObject, getContextOptions(options)); + return this.runInContext(context, options); + } +} + +function validateContext(contextifiedObject) { + if (!isContext(contextifiedObject)) { + throw new ERR_INVALID_ARG_TYPE('contextifiedObject', 'vm.Context', + contextifiedObject); + } +} + +function getRunInContextArgs(contextifiedObject, options = kEmptyObject) { + validateObject(options, 'options'); + + let timeout = options.timeout; + if (timeout === undefined) { + timeout = -1; + } else { + validateUint32(timeout, 'options.timeout', true); + } + + const { + displayErrors = true, + breakOnSigint = false, + [kVmBreakFirstLineSymbol]: breakFirstLine = false, + } = options; + + validateBoolean(displayErrors, 'options.displayErrors'); + validateBoolean(breakOnSigint, 'options.breakOnSigint'); + + return { + breakOnSigint, + args: [ + contextifiedObject, + timeout, + displayErrors, + breakOnSigint, + breakFirstLine, + ], + }; +} + +function getContextOptions(options) { + if (!options) + return {}; + const contextOptions = { + name: options.contextName, + origin: options.contextOrigin, + codeGeneration: undefined, + microtaskMode: options.microtaskMode, + }; + if (contextOptions.name !== undefined) + validateString(contextOptions.name, 'options.contextName'); + if (contextOptions.origin !== undefined) + 
validateString(contextOptions.origin, 'options.contextOrigin'); + if (options.contextCodeGeneration !== undefined) { + validateObject(options.contextCodeGeneration, + 'options.contextCodeGeneration'); + const { strings, wasm } = options.contextCodeGeneration; + if (strings !== undefined) + validateBoolean(strings, 'options.contextCodeGeneration.strings'); + if (wasm !== undefined) + validateBoolean(wasm, 'options.contextCodeGeneration.wasm'); + contextOptions.codeGeneration = { strings, wasm }; + } + if (options.microtaskMode !== undefined) + validateString(options.microtaskMode, 'options.microtaskMode'); + return contextOptions; +} + +let defaultContextNameIndex = 1; +function createContext(contextObject = {}, options = kEmptyObject) { + if (contextObject !== vm_context_no_contextify && isContext(contextObject)) { + return contextObject; + } + + validateObject(options, 'options'); + + const { + name = `VM Context ${defaultContextNameIndex++}`, + origin, + codeGeneration, + microtaskMode, + importModuleDynamically, + } = options; + + validateString(name, 'options.name'); + if (origin !== undefined) + validateString(origin, 'options.origin'); + if (codeGeneration !== undefined) + validateObject(codeGeneration, 'options.codeGeneration'); + + let strings = true; + let wasm = true; + if (codeGeneration !== undefined) { + ({ strings = true, wasm = true } = codeGeneration); + validateBoolean(strings, 'options.codeGeneration.strings'); + validateBoolean(wasm, 'options.codeGeneration.wasm'); + } + + validateOneOf(microtaskMode, + 'options.microtaskMode', + ['afterEvaluate', undefined]); + const microtaskQueue = (microtaskMode === 'afterEvaluate'); + + const hostDefinedOptionId = + getHostDefinedOptionId(importModuleDynamically, name); + + const result = makeContext(contextObject, name, origin, strings, wasm, microtaskQueue, hostDefinedOptionId); + // Register the context scope callback after the context was initialized. + registerImportModuleDynamically(result, importModuleDynamically); + return result; +} + +function createScript(code, options) { + return new Script(code, options); +} + +// Remove all SIGINT listeners and re-attach them after the wrapped function +// has executed, so that caught SIGINT are handled by the listeners again. +function sigintHandlersWrap(fn, thisArg, argsArray) { + const sigintListeners = process.rawListeners('SIGINT'); + + process.removeAllListeners('SIGINT'); + + try { + return ReflectApply(fn, thisArg, argsArray); + } finally { + // Add using the public methods so that the `newListener` handler of + // process can re-attach the listeners. 
+ ArrayPrototypeForEach(sigintListeners, (listener) => { + process.addListener('SIGINT', listener); + }); + } +} + +function runInContext(code, contextifiedObject, options) { + validateContext(contextifiedObject); + if (typeof options === 'string') { + options = { + filename: options, + [kParsingContext]: contextifiedObject, + }; + } else { + options = { ...options, [kParsingContext]: contextifiedObject }; + } + return createScript(code, options) + .runInContext(contextifiedObject, options); +} + +function runInNewContext(code, contextObject, options) { + if (typeof options === 'string') { + options = { filename: options }; + } + contextObject = createContext(contextObject, getContextOptions(options)); + options = { ...options, [kParsingContext]: contextObject }; + return createScript(code, options).runInNewContext(contextObject, options); +} + +function runInThisContext(code, options) { + if (typeof options === 'string') { + options = { filename: options }; + } + return createScript(code, options).runInThisContext(options); +} + +function compileFunction(code, params, options = kEmptyObject) { + validateString(code, 'code'); + validateObject(options, 'options'); + if (params !== undefined) { + validateStringArray(params, 'params'); + } + const { + filename = '', + columnOffset = 0, + lineOffset = 0, + cachedData = undefined, + produceCachedData = false, + parsingContext = undefined, + contextExtensions = [], + importModuleDynamically, + } = options; + + validateString(filename, 'options.filename'); + validateInt32(columnOffset, 'options.columnOffset'); + validateInt32(lineOffset, 'options.lineOffset'); + if (cachedData !== undefined) + validateBuffer(cachedData, 'options.cachedData'); + validateBoolean(produceCachedData, 'options.produceCachedData'); + if (parsingContext !== undefined) { + if ( + typeof parsingContext !== 'object' || + parsingContext === null || + !isContext(parsingContext) + ) { + throw new ERR_INVALID_ARG_TYPE( + 'options.parsingContext', + 'Context', + parsingContext, + ); + } + } + validateArray(contextExtensions, 'options.contextExtensions'); + ArrayPrototypeForEach(contextExtensions, (extension, i) => { + const name = `options.contextExtensions[${i}]`; + validateObject(extension, name, kValidateObjectAllowNullable); + }); + + const hostDefinedOptionId = + getHostDefinedOptionId(importModuleDynamically, filename); + + return internalCompileFunction( + code, filename, lineOffset, columnOffset, + cachedData, produceCachedData, parsingContext, contextExtensions, + params, hostDefinedOptionId, importModuleDynamically, + ).function; +} + +const measureMemoryModes = { + summary: constants.measureMemory.mode.SUMMARY, + detailed: constants.measureMemory.mode.DETAILED, +}; + +const measureMemoryExecutions = { + default: constants.measureMemory.execution.DEFAULT, + eager: constants.measureMemory.execution.EAGER, +}; + +function measureMemory(options = kEmptyObject) { + emitExperimentalWarning('vm.measureMemory'); + validateObject(options, 'options'); + const { mode = 'summary', execution = 'default' } = options; + validateOneOf(mode, 'options.mode', ['summary', 'detailed']); + validateOneOf(execution, 'options.execution', ['default', 'eager']); + const result = _measureMemory(measureMemoryModes[mode], + measureMemoryExecutions[execution]); + if (result === undefined) { + return PromiseReject(new ERR_CONTEXT_NOT_INITIALIZED()); + } + return result; +} + +const vmConstants = { + __proto__: null, + USE_MAIN_CONTEXT_DEFAULT_LOADER: vm_dynamic_import_main_context_default, + 
DONT_CONTEXTIFY: vm_context_no_contextify, +}; + +ObjectFreeze(vmConstants); + +module.exports = { + Script, + createContext, + createScript, + runInContext, + runInNewContext, + runInThisContext, + isContext, + compileFunction, + measureMemory, + constants: vmConstants, +}; + +// The vm module is patched to include vm.Module, vm.SourceTextModule +// and vm.SyntheticModule in the pre-execution phase when +// --experimental-vm-modules is on. \ No newline at end of file diff --git a/.codesandbox/node/wasi.js b/.codesandbox/node/wasi.js new file mode 100644 index 0000000000..71dbc60ac8 --- /dev/null +++ b/.codesandbox/node/wasi.js @@ -0,0 +1,176 @@ +'use strict'; +const { + ArrayPrototypeForEach, + ArrayPrototypeMap, + ArrayPrototypePush, + FunctionPrototypeBind, + ObjectEntries, + String, + Symbol, +} = primordials; + +const { + ERR_INVALID_ARG_VALUE, + ERR_WASI_ALREADY_STARTED, +} = require('internal/errors').codes; +const { + emitExperimentalWarning, + kEmptyObject, +} = require('internal/util'); +const { + validateArray, + validateBoolean, + validateFunction, + validateInt32, + validateObject, + validateString, + validateUndefined, +} = require('internal/validators'); +const kExitCode = Symbol('kExitCode'); +const kSetMemory = Symbol('kSetMemory'); +const kStarted = Symbol('kStarted'); +const kInstance = Symbol('kInstance'); +const kBindingName = Symbol('kBindingName'); + +emitExperimentalWarning('WASI'); + +class WASI { + constructor(options = kEmptyObject) { + validateObject(options, 'options'); + + let _WASI; + validateString(options.version, 'options.version'); + switch (options.version) { + case 'unstable': + ({ WASI: _WASI } = internalBinding('wasi')); + this[kBindingName] = 'wasi_unstable'; + break; + case 'preview1': + ({ WASI: _WASI } = internalBinding('wasi')); + this[kBindingName] = 'wasi_snapshot_preview1'; + break; + // When adding support for additional wasi versions add case here + default: + throw new ERR_INVALID_ARG_VALUE('options.version', + options.version, + 'unsupported WASI version'); + } + + if (options.args !== undefined) + validateArray(options.args, 'options.args'); + const args = ArrayPrototypeMap(options.args || [], String); + + const env = []; + if (options.env !== undefined) { + validateObject(options.env, 'options.env'); + ArrayPrototypeForEach( + ObjectEntries(options.env), + ({ 0: key, 1: value }) => { + if (value !== undefined) + ArrayPrototypePush(env, `${key}=${value}`); + }); + } + + const preopens = []; + if (options.preopens !== undefined) { + validateObject(options.preopens, 'options.preopens'); + ArrayPrototypeForEach( + ObjectEntries(options.preopens), + ({ 0: key, 1: value }) => + ArrayPrototypePush(preopens, String(key), String(value)), + ); + } + + const { stdin = 0, stdout = 1, stderr = 2 } = options; + validateInt32(stdin, 'options.stdin', 0); + validateInt32(stdout, 'options.stdout', 0); + validateInt32(stderr, 'options.stderr', 0); + const stdio = [stdin, stdout, stderr]; + + const wrap = new _WASI(args, env, preopens, stdio); + + for (const prop in wrap) { + wrap[prop] = FunctionPrototypeBind(wrap[prop], wrap); + } + + let returnOnExit = true; + if (options.returnOnExit !== undefined) { + validateBoolean(options.returnOnExit, 'options.returnOnExit'); + returnOnExit = options.returnOnExit; + } + if (returnOnExit) + wrap.proc_exit = FunctionPrototypeBind(wasiReturnOnProcExit, this); + + this[kSetMemory] = wrap._setMemory; + delete wrap._setMemory; + this.wasiImport = wrap; + this[kStarted] = false; + this[kExitCode] = 0; + this[kInstance] = 
undefined; + } + + finalizeBindings(instance, { + memory = instance?.exports?.memory, + } = {}) { + if (this[kStarted]) { + throw new ERR_WASI_ALREADY_STARTED(); + } + + validateObject(instance, 'instance'); + validateObject(instance.exports, 'instance.exports'); + + this[kSetMemory](memory); + + this[kInstance] = instance; + this[kStarted] = true; + } + + // Must not export _initialize, must export _start + start(instance) { + this.finalizeBindings(instance); + + const { _start, _initialize } = this[kInstance].exports; + + validateFunction(_start, 'instance.exports._start'); + validateUndefined(_initialize, 'instance.exports._initialize'); + + try { + _start(); + } catch (err) { + if (err !== kExitCode) { + throw err; + } + } + + return this[kExitCode]; + } + + // Must not export _start, may optionally export _initialize + initialize(instance) { + this.finalizeBindings(instance); + + const { _start, _initialize } = this[kInstance].exports; + + validateUndefined(_start, 'instance.exports._start'); + if (_initialize !== undefined) { + validateFunction(_initialize, 'instance.exports._initialize'); + _initialize(); + } + } + + getImportObject() { + return { [this[kBindingName]]: this.wasiImport }; + } +} + +module.exports = { WASI }; + + +function wasiReturnOnProcExit(rval) { + // If __wasi_proc_exit() does not terminate the process, an assertion is + // triggered in the wasm runtime. Node can sidestep the assertion and return + // an exit code by recording the exit code, and throwing a JavaScript + // exception that WebAssembly cannot catch. + this[kExitCode] = rval; + throw kExitCode; +} \ No newline at end of file diff --git a/.codesandbox/node/worker_pool.js b/.codesandbox/node/worker_pool.js new file mode 100644 index 0000000000..bc0e59c7d9 --- /dev/null +++ b/.codesandbox/node/worker_pool.js @@ -0,0 +1,13 @@ +const WorkerPool = require('./worker_pool.js'); +const os = require('node:os'); + +const pool = new WorkerPool(os.availableParallelism()); + +let finished = 0; +for (let i = 0; i < 10; i++) { + pool.runTask({ a: 42, b: 100 }, (err, result) => { + console.log(i, err, result); + if (++finished === 10) + pool.close(); + }); +} \ No newline at end of file diff --git a/.codesandbox/node/worker_threads.js b/.codesandbox/node/worker_threads.js new file mode 100644 index 0000000000..7192c8d237 --- /dev/null +++ b/.codesandbox/node/worker_threads.js @@ -0,0 +1,57 @@ +'use strict'; + +const { + isInternalThread, + isMainThread, + SHARE_ENV, + resourceLimits, + setEnvironmentData, + getEnvironmentData, + threadId, + threadName, + Worker, +} = require('internal/worker'); + +const { + MessagePort, + MessageChannel, + markAsUncloneable, + moveMessagePortToContext, + receiveMessageOnPort, + BroadcastChannel, +} = require('internal/worker/io'); + +const { + postMessageToThread, +} = require('internal/worker/messaging'); + +const { + markAsUntransferable, + isMarkedAsUntransferable, +} = require('internal/buffer'); + +const { locks } = require('internal/locks'); + +module.exports = { + isInternalThread, + isMainThread, + MessagePort, + MessageChannel, + markAsUncloneable, + markAsUntransferable, + isMarkedAsUntransferable, + moveMessagePortToContext, + receiveMessageOnPort, + resourceLimits, + postMessageToThread, + threadId, + threadName, + SHARE_ENV, + Worker, + parentPort: null, + workerData: null, + BroadcastChannel, + setEnvironmentData, + getEnvironmentData, + locks, +}; \ No newline at end of file diff --git a/.codesandbox/node/zlib.js b/.codesandbox/node/zlib.js new file mode 100644 index 
0000000000..d910795812 --- /dev/null +++ b/.codesandbox/node/zlib.js @@ -0,0 +1,1020 @@ +'use strict'; + +const { + ArrayBuffer, + MathMax, + NumberIsNaN, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectEntries, + ObjectFreeze, + ObjectKeys, + ObjectSetPrototypeOf, + ReflectApply, + Symbol, + Uint32Array, +} = primordials; + +const { + codes: { + ERR_BROTLI_INVALID_PARAM, + ERR_BUFFER_TOO_LARGE, + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + ERR_TRAILING_JUNK_AFTER_STREAM_END, + ERR_ZSTD_INVALID_PARAM, + }, + genericNodeError, +} = require('internal/errors'); +const { Transform, finished } = require('stream'); +const { + deprecateInstantiation, +} = require('internal/util'); +const { + isArrayBufferView, + isAnyArrayBuffer, + isUint8Array, +} = require('internal/util/types'); +const binding = internalBinding('zlib'); +const { crc32: crc32Native } = binding; +const assert = require('internal/assert'); +const { + Buffer, + kMaxLength, +} = require('buffer'); +const { owner_symbol } = require('internal/async_hooks').symbols; +const { + checkRangesOrGetDefault, + validateFunction, + validateUint32, + validateFiniteNumber, +} = require('internal/validators'); + +const kFlushFlag = Symbol('kFlushFlag'); +const kError = Symbol('kError'); + +const constants = internalBinding('constants').zlib; +const { + // Zlib flush levels + Z_NO_FLUSH, Z_BLOCK, Z_PARTIAL_FLUSH, Z_SYNC_FLUSH, Z_FULL_FLUSH, Z_FINISH, + // Zlib option values + Z_MIN_CHUNK, Z_MIN_WINDOWBITS, Z_MAX_WINDOWBITS, Z_MIN_LEVEL, Z_MAX_LEVEL, + Z_MIN_MEMLEVEL, Z_MAX_MEMLEVEL, Z_DEFAULT_CHUNK, Z_DEFAULT_COMPRESSION, + Z_DEFAULT_STRATEGY, Z_DEFAULT_WINDOWBITS, Z_DEFAULT_MEMLEVEL, Z_FIXED, + // Node's compression stream modes (node_zlib_mode) + DEFLATE, DEFLATERAW, INFLATE, INFLATERAW, GZIP, GUNZIP, UNZIP, + BROTLI_DECODE, BROTLI_ENCODE, + ZSTD_COMPRESS, ZSTD_DECOMPRESS, + // Brotli operations (~flush levels) + BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_FLUSH, + BROTLI_OPERATION_FINISH, BROTLI_OPERATION_EMIT_METADATA, + // Zstd end directives (~flush levels) + ZSTD_e_continue, ZSTD_e_flush, ZSTD_e_end, +} = constants; + +// Translation table for return codes. +const codes = { + Z_OK: constants.Z_OK, + Z_STREAM_END: constants.Z_STREAM_END, + Z_NEED_DICT: constants.Z_NEED_DICT, + Z_ERRNO: constants.Z_ERRNO, + Z_STREAM_ERROR: constants.Z_STREAM_ERROR, + Z_DATA_ERROR: constants.Z_DATA_ERROR, + Z_MEM_ERROR: constants.Z_MEM_ERROR, + Z_BUF_ERROR: constants.Z_BUF_ERROR, + Z_VERSION_ERROR: constants.Z_VERSION_ERROR, +}; + +for (const ckey of ObjectKeys(codes)) { + codes[codes[ckey]] = ckey; +} + +function zlibBuffer(engine, buffer, callback) { + validateFunction(callback, 'callback'); + // Streams do not support non-Uint8Array ArrayBufferViews yet. Convert it to a + // Buffer without copying. 
+ if (isArrayBufferView(buffer) && !isUint8Array(buffer)) { + buffer = Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); + } else if (isAnyArrayBuffer(buffer)) { + buffer = Buffer.from(buffer); + } + engine.buffers = null; + engine.nread = 0; + engine.cb = callback; + engine.on('data', zlibBufferOnData); + engine.on('error', zlibBufferOnError); + engine.on('end', zlibBufferOnEnd); + engine.end(buffer); +} + +function zlibBufferOnData(chunk) { + if (!this.buffers) { + this.buffers = [chunk]; + } else { + this.buffers.push(chunk); + } + this.nread += chunk.length; + if (this.nread > this._maxOutputLength) { + this.close(); + this.removeAllListeners('end'); + this.cb(new ERR_BUFFER_TOO_LARGE(this._maxOutputLength)); + } +} + +function zlibBufferOnError(err) { + this.removeAllListeners('end'); + this.cb(err); +} + +function zlibBufferOnEnd() { + let buf; + if (this.nread === 0) { + buf = Buffer.alloc(0); + } else { + const bufs = this.buffers; + buf = (bufs.length === 1 ? bufs[0] : Buffer.concat(bufs, this.nread)); + } + this.close(); + if (this._info) + this.cb(null, { buffer: buf, engine: this }); + else + this.cb(null, buf); +} + +function zlibBufferSync(engine, buffer) { + if (typeof buffer === 'string') { + buffer = Buffer.from(buffer); + } else if (!isArrayBufferView(buffer)) { + if (isAnyArrayBuffer(buffer)) { + buffer = Buffer.from(buffer); + } else { + throw new ERR_INVALID_ARG_TYPE( + 'buffer', + ['string', 'Buffer', 'TypedArray', 'DataView', 'ArrayBuffer'], + buffer, + ); + } + } + buffer = processChunkSync(engine, buffer, engine._finishFlushFlag); + if (engine._info) + return { buffer, engine }; + return buffer; +} + +function zlibOnError(message, errno, code) { + const self = this[owner_symbol]; + // There is no way to cleanly recover. + // Continuing only obscures problems. + + const error = genericNodeError(message, { errno, code }); + error.errno = errno; + error.code = code; + self.destroy(error); + self[kError] = error; +} + +const FLUSH_BOUND = [ + [ Z_NO_FLUSH, Z_BLOCK ], + [ BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_EMIT_METADATA ], + [ ZSTD_e_continue, ZSTD_e_end ], +]; +const FLUSH_BOUND_IDX_NORMAL = 0; +const FLUSH_BOUND_IDX_BROTLI = 1; +const FLUSH_BOUND_IDX_ZSTD = 2; + +/** + * The base class for all Zlib-style streams. + * @class + */ +function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) { + let chunkSize = Z_DEFAULT_CHUNK; + let maxOutputLength = kMaxLength; + // The ZlibBase class is not exported to user land, the mode should only be + // passed in by us. 
+ assert(typeof mode === 'number'); + assert(mode >= DEFLATE && mode <= ZSTD_DECOMPRESS); + + let flushBoundIdx; + if (mode === BROTLI_ENCODE || mode === BROTLI_DECODE) { + flushBoundIdx = FLUSH_BOUND_IDX_BROTLI; + } else if (mode === ZSTD_COMPRESS || mode === ZSTD_DECOMPRESS) { + flushBoundIdx = FLUSH_BOUND_IDX_ZSTD; + } else { + flushBoundIdx = FLUSH_BOUND_IDX_NORMAL; + } + + if (opts) { + chunkSize = opts.chunkSize; + if (!validateFiniteNumber(chunkSize, 'options.chunkSize')) { + chunkSize = Z_DEFAULT_CHUNK; + } else if (chunkSize < Z_MIN_CHUNK) { + throw new ERR_OUT_OF_RANGE('options.chunkSize', + `>= ${Z_MIN_CHUNK}`, chunkSize); + } + + flush = checkRangesOrGetDefault( + opts.flush, 'options.flush', + FLUSH_BOUND[flushBoundIdx][0], FLUSH_BOUND[flushBoundIdx][1], flush); + + finishFlush = checkRangesOrGetDefault( + opts.finishFlush, 'options.finishFlush', + FLUSH_BOUND[flushBoundIdx][0], FLUSH_BOUND[flushBoundIdx][1], + finishFlush); + + maxOutputLength = checkRangesOrGetDefault( + opts.maxOutputLength, 'options.maxOutputLength', + 1, kMaxLength, kMaxLength); + + if (opts.encoding || opts.objectMode || opts.writableObjectMode) { + opts = { ...opts }; + opts.encoding = null; + opts.objectMode = false; + opts.writableObjectMode = false; + } + } + + ReflectApply(Transform, this, [{ autoDestroy: true, ...opts }]); + this[kError] = null; + this.bytesWritten = 0; + this._handle = handle; + handle[owner_symbol] = this; + // Used by processCallback() and zlibOnError() + handle.onerror = zlibOnError; + this._outBuffer = Buffer.allocUnsafe(chunkSize); + this._outOffset = 0; + + this._chunkSize = chunkSize; + this._defaultFlushFlag = flush; + this._finishFlushFlag = finishFlush; + this._defaultFullFlushFlag = fullFlush; + this._info = opts?.info; + this._maxOutputLength = maxOutputLength; + + this._rejectGarbageAfterEnd = opts?.rejectGarbageAfterEnd === true; +} +ObjectSetPrototypeOf(ZlibBase.prototype, Transform.prototype); +ObjectSetPrototypeOf(ZlibBase, Transform); + +ObjectDefineProperty(ZlibBase.prototype, '_closed', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return !this._handle; + }, +}); + +/** + * @this {ZlibBase} + * @returns {void} + */ +ZlibBase.prototype.reset = function() { + assert(this._handle, 'zlib binding closed'); + return this._handle.reset(); +}; + +/** + * @this {ZlibBase} + * This is the _flush function called by the transform class, + * internally, when the last chunk has been written. + * @returns {void} + */ +ZlibBase.prototype._flush = function(callback) { + this._transform(Buffer.alloc(0), '', callback); +}; + +/** + * @this {ZlibBase} + * Force Transform compat behavior. + * @returns {void} + */ +ZlibBase.prototype._final = function(callback) { + callback(); +}; + +// If a flush is scheduled while another flush is still pending, a way to figure +// out which one is the "stronger" flush is needed. +// This is currently only used to figure out which flush flag to use for the +// last chunk. +// Roughly, the following holds: +// Z_NO_FLUSH < Z_BLOCK < Z_PARTIAL_FLUSH < +// Z_SYNC_FLUSH < Z_FULL_FLUSH < Z_FINISH +const flushiness = []; +const kFlushFlagList = [Z_NO_FLUSH, Z_BLOCK, Z_PARTIAL_FLUSH, + Z_SYNC_FLUSH, Z_FULL_FLUSH, Z_FINISH]; +for (let i = 0; i < kFlushFlagList.length; i++) { + flushiness[kFlushFlagList[i]] = i; +} + +function maxFlush(a, b) { + return flushiness[a] > flushiness[b] ? 
a : b; +} + +// Set up a list of 'special' buffers that can be written using .write() +// from the .flush() code as a way of introducing flushing operations into the +// write sequence. +const kFlushBuffers = []; +{ + const dummyArrayBuffer = new ArrayBuffer(); + for (const flushFlag of kFlushFlagList) { + kFlushBuffers[flushFlag] = Buffer.from(dummyArrayBuffer); + kFlushBuffers[flushFlag][kFlushFlag] = flushFlag; + } +} + +ZlibBase.prototype.flush = function(kind, callback) { + if (typeof kind === 'function' || (kind === undefined && !callback)) { + callback = kind; + kind = this._defaultFullFlushFlag; + } + + if (this.writableFinished) { + if (callback) + process.nextTick(callback); + } else if (this.writableEnded) { + if (callback) + this.once('end', callback); + } else { + this.write(kFlushBuffers[kind], '', callback); + } +}; + +/** + * @this {import('stream').Transform} + * @param {(err?: Error) => any} [callback] + */ +ZlibBase.prototype.close = function(callback) { + if (callback) finished(this, callback); + this.destroy(); +}; + +ZlibBase.prototype._destroy = function(err, callback) { + _close(this); + callback(err); +}; + +ZlibBase.prototype._transform = function(chunk, encoding, cb) { + let flushFlag = this._defaultFlushFlag; + // We use a 'fake' zero-length chunk to carry information about flushes from + // the public API to the actual stream implementation. + if (typeof chunk[kFlushFlag] === 'number') { + flushFlag = chunk[kFlushFlag]; + } + + // For the last chunk, also apply `_finishFlushFlag`. + if (this.writableEnded && this.writableLength === chunk.byteLength) { + flushFlag = maxFlush(flushFlag, this._finishFlushFlag); + } + processChunk(this, chunk, flushFlag, cb); +}; + +ZlibBase.prototype._processChunk = function(chunk, flushFlag, cb) { + // _processChunk() is left for backwards compatibility + if (typeof cb === 'function') + processChunk(this, chunk, flushFlag, cb); + else + return processChunkSync(this, chunk, flushFlag); +}; + +function processChunkSync(self, chunk, flushFlag) { + let availInBefore = chunk.byteLength; + let availOutBefore = self._chunkSize - self._outOffset; + let inOff = 0; + let availOutAfter; + let availInAfter; + + const buffers = []; + let nread = 0; + let inputRead = 0; + const state = self._writeState; + const handle = self._handle; + let buffer = self._outBuffer; + let offset = self._outOffset; + const chunkSize = self._chunkSize; + + let error; + self.on('error', function onError(er) { + error = er; + }); + + while (true) { + handle.writeSync(flushFlag, + chunk, // in + inOff, // in_off + availInBefore, // in_len + buffer, // out + offset, // out_off + availOutBefore); // out_len + if (error) + throw error; + else if (self[kError]) + throw self[kError]; + + availOutAfter = state[0]; + availInAfter = state[1]; + + const inDelta = (availInBefore - availInAfter); + inputRead += inDelta; + + const have = availOutBefore - availOutAfter; + if (have > 0) { + const out = buffer.slice(offset, offset + have); + offset += have; + buffers.push(out); + nread += out.byteLength; + + if (nread > self._maxOutputLength) { + _close(self); + throw new ERR_BUFFER_TOO_LARGE(self._maxOutputLength); + } + + } else { + assert(have === 0, 'have should not go down'); + } + + // Exhausted the output buffer, or used all the input create a new one. + if (availOutAfter === 0 || offset >= chunkSize) { + availOutBefore = chunkSize; + offset = 0; + buffer = Buffer.allocUnsafe(chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. 
+ // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) time, + // it'll have the correct byte counts. + inOff += inDelta; + availInBefore = availInAfter; + } else { + break; + } + } + + self.bytesWritten = inputRead; + _close(self); + + if (nread === 0) + return Buffer.alloc(0); + + return (buffers.length === 1 ? buffers[0] : Buffer.concat(buffers, nread)); +} + +function processChunk(self, chunk, flushFlag, cb) { + const handle = self._handle; + if (!handle) return process.nextTick(cb); + + handle.buffer = chunk; + handle.cb = cb; + handle.availOutBefore = self._chunkSize - self._outOffset; + handle.availInBefore = chunk.byteLength; + handle.inOff = 0; + handle.flushFlag = flushFlag; + + handle.write(flushFlag, + chunk, // in + 0, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + handle.availOutBefore); // out_len +} + +function processCallback() { + // This callback's context (`this`) is the `_handle` (ZCtx) object. It is + // important to null out the values once they are no longer needed since + // `_handle` can stay in memory long after the buffer is needed. + const handle = this; + const self = this[owner_symbol]; + const state = self._writeState; + + if (self.destroyed) { + this.buffer = null; + this.cb(); + return; + } + + const availOutAfter = state[0]; + const availInAfter = state[1]; + + const inDelta = handle.availInBefore - availInAfter; + self.bytesWritten += inDelta; + + const have = handle.availOutBefore - availOutAfter; + let streamBufferIsFull = false; + if (have > 0) { + const out = self._outBuffer.slice(self._outOffset, self._outOffset + have); + self._outOffset += have; + streamBufferIsFull = !self.push(out); + } else { + assert(have === 0, 'have should not go down'); + } + + if (self.destroyed) { + this.cb(); + return; + } + + // Exhausted the output buffer, or used all the input create a new one. + if (availOutAfter === 0 || self._outOffset >= self._chunkSize) { + handle.availOutBefore = self._chunkSize; + self._outOffset = 0; + self._outBuffer = Buffer.allocUnsafe(self._chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. + // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) time, + // it'll have the correct byte counts. + handle.inOff += inDelta; + handle.availInBefore = availInAfter; + + + if (!streamBufferIsFull) { + this.write(handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize); // out_len + } else { + const oldRead = self._read; + self._read = (n) => { + self._read = oldRead; + this.write(handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize); // out_len + self._read(n); + }; + } + return; + } + + if (availInAfter > 0) { + // If we have more input that should be written, but we also have output + // space available, that means that the compression library was not + // interested in receiving more data, and in particular that the input + // stream has ended early. + // This applies to streams where we don't check data past the end of + // what was consumed; that is, everything except Gunzip/Unzip. 
+ + if (self._rejectGarbageAfterEnd) { + const err = new ERR_TRAILING_JUNK_AFTER_STREAM_END(); + self.destroy(err); + this.cb(err); + return; + } + + self.push(null); + } + + // Finished with the chunk. + this.buffer = null; + this.cb(); +} + +/** + * @param {ZlibBase} engine + * @private + */ +function _close(engine) { + // Caller may invoke .close after a zlib error (which will null _handle) + engine._handle?.close(); + engine._handle = null; +} + +const zlibDefaultOpts = { + flush: Z_NO_FLUSH, + finishFlush: Z_FINISH, + fullFlush: Z_FULL_FLUSH, +}; +// Base class for all streams actually backed by zlib and using zlib-specific +// parameters. +function Zlib(opts, mode) { + let windowBits = Z_DEFAULT_WINDOWBITS; + let level = Z_DEFAULT_COMPRESSION; + let memLevel = Z_DEFAULT_MEMLEVEL; + let strategy = Z_DEFAULT_STRATEGY; + let dictionary; + + if (opts) { + // windowBits is special. On the compression side, 0 is an invalid value. + // But on the decompression side, a value of 0 for windowBits tells zlib + // to use the window size in the zlib header of the compressed stream. + if ((opts.windowBits == null || opts.windowBits === 0) && + (mode === INFLATE || + mode === GUNZIP || + mode === UNZIP)) { + windowBits = 0; + } else { + // `{ windowBits: 8 }` is valid for deflate but not gzip. + const min = Z_MIN_WINDOWBITS + (mode === GZIP ? 1 : 0); + windowBits = checkRangesOrGetDefault( + opts.windowBits, 'options.windowBits', + min, Z_MAX_WINDOWBITS, Z_DEFAULT_WINDOWBITS); + } + + level = checkRangesOrGetDefault( + opts.level, 'options.level', + Z_MIN_LEVEL, Z_MAX_LEVEL, Z_DEFAULT_COMPRESSION); + + memLevel = checkRangesOrGetDefault( + opts.memLevel, 'options.memLevel', + Z_MIN_MEMLEVEL, Z_MAX_MEMLEVEL, Z_DEFAULT_MEMLEVEL); + + strategy = checkRangesOrGetDefault( + opts.strategy, 'options.strategy', + Z_DEFAULT_STRATEGY, Z_FIXED, Z_DEFAULT_STRATEGY); + + dictionary = opts.dictionary; + if (dictionary !== undefined && !isArrayBufferView(dictionary)) { + if (isAnyArrayBuffer(dictionary)) { + dictionary = Buffer.from(dictionary); + } else { + throw new ERR_INVALID_ARG_TYPE( + 'options.dictionary', + ['Buffer', 'TypedArray', 'DataView', 'ArrayBuffer'], + dictionary, + ); + } + } + } + + const handle = new binding.Zlib(mode); + // Ideally, we could let ZlibBase() set up _writeState. I haven't been able + // to come up with a good solution that doesn't break our internal API, + // and with it all supported npm versions at the time of writing. + this._writeState = new Uint32Array(2); + handle.init(windowBits, + level, + memLevel, + strategy, + this._writeState, + processCallback, + dictionary); + + ReflectApply(ZlibBase, this, [opts, mode, handle, zlibDefaultOpts]); + + this._level = level; + this._strategy = strategy; + this._mode = mode; +} +ObjectSetPrototypeOf(Zlib.prototype, ZlibBase.prototype); +ObjectSetPrototypeOf(Zlib, ZlibBase); + +// This callback is used by `.params()` to wait until a full flush happened +// before adjusting the parameters. In particular, the call to the native +// `params()` function should not happen while a write is currently in progress +// on the threadpool. 
+function paramsAfterFlushCallback(level, strategy, callback) { + assert(this._handle, 'zlib binding closed'); + this._handle.params(level, strategy); + if (!this.destroyed) { + this._level = level; + this._strategy = strategy; + if (callback) callback(); + } +} + +Zlib.prototype.params = function params(level, strategy, callback) { + checkRangesOrGetDefault(level, 'level', Z_MIN_LEVEL, Z_MAX_LEVEL); + checkRangesOrGetDefault(strategy, 'strategy', Z_DEFAULT_STRATEGY, Z_FIXED); + + if (this._level !== level || this._strategy !== strategy) { + this.flush( + Z_SYNC_FLUSH, + paramsAfterFlushCallback.bind(this, level, strategy, callback), + ); + } else { + process.nextTick(callback); + } +}; + +// generic zlib +// minimal 2-byte header +function Deflate(opts) { + if (!(this instanceof Deflate)) { + return deprecateInstantiation(Deflate, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, DEFLATE]); +} +ObjectSetPrototypeOf(Deflate.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Deflate, Zlib); + +function Inflate(opts) { + if (!(this instanceof Inflate)) { + return deprecateInstantiation(Inflate, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, INFLATE]); +} +ObjectSetPrototypeOf(Inflate.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Inflate, Zlib); + +function Gzip(opts) { + if (!(this instanceof Gzip)) { + return deprecateInstantiation(Gzip, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, GZIP]); +} +ObjectSetPrototypeOf(Gzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Gzip, Zlib); + +function Gunzip(opts) { + if (!(this instanceof Gunzip)) { + return deprecateInstantiation(Gunzip, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, GUNZIP]); +} +ObjectSetPrototypeOf(Gunzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Gunzip, Zlib); + +function DeflateRaw(opts) { + if (opts && opts.windowBits === 8) opts.windowBits = 9; + if (!(this instanceof DeflateRaw)) { + return deprecateInstantiation(DeflateRaw, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, DEFLATERAW]); +} +ObjectSetPrototypeOf(DeflateRaw.prototype, Zlib.prototype); +ObjectSetPrototypeOf(DeflateRaw, Zlib); + +function InflateRaw(opts) { + if (!(this instanceof InflateRaw)) { + return deprecateInstantiation(InflateRaw, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, INFLATERAW]); +} +ObjectSetPrototypeOf(InflateRaw.prototype, Zlib.prototype); +ObjectSetPrototypeOf(InflateRaw, Zlib); + +function Unzip(opts) { + if (!(this instanceof Unzip)) { + return deprecateInstantiation(Unzip, 'DEP0184', opts); + } + ReflectApply(Zlib, this, [opts, UNZIP]); +} +ObjectSetPrototypeOf(Unzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Unzip, Zlib); + +function createConvenienceMethod(ctor, sync) { + if (sync) { + return function syncBufferWrapper(buffer, opts) { + return zlibBufferSync(new ctor(opts), buffer); + }; + } + return function asyncBufferWrapper(buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new ctor(opts), buffer, callback); + }; +} + +const kMaxBrotliParam = MathMax( + ...ObjectEntries(constants) + .map(({ 0: key, 1: value }) => (key.startsWith('BROTLI_PARAM_') ? 
value : 0)), +); +const brotliInitParamsArray = new Uint32Array(kMaxBrotliParam + 1); + +const brotliDefaultOpts = { + flush: BROTLI_OPERATION_PROCESS, + finishFlush: BROTLI_OPERATION_FINISH, + fullFlush: BROTLI_OPERATION_FLUSH, +}; +function Brotli(opts, mode) { + assert(mode === BROTLI_DECODE || mode === BROTLI_ENCODE); + + brotliInitParamsArray.fill(-1); + if (opts?.params) { + ObjectKeys(opts.params).forEach((origKey) => { + const key = +origKey; + if (NumberIsNaN(key) || key < 0 || key > kMaxBrotliParam || + (brotliInitParamsArray[key] | 0) !== -1) { + throw new ERR_BROTLI_INVALID_PARAM(origKey); + } + + const value = opts.params[origKey]; + if (typeof value !== 'number' && typeof value !== 'boolean') { + throw new ERR_INVALID_ARG_TYPE('options.params[key]', + 'number', opts.params[origKey]); + } + brotliInitParamsArray[key] = value; + }); + } + + const handle = mode === BROTLI_DECODE ? + new binding.BrotliDecoder(mode) : new binding.BrotliEncoder(mode); + + this._writeState = new Uint32Array(2); + handle.init(brotliInitParamsArray, this._writeState, processCallback); + + ReflectApply(ZlibBase, this, [opts, mode, handle, brotliDefaultOpts]); +} +ObjectSetPrototypeOf(Brotli.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Brotli, Zlib); + +function BrotliCompress(opts) { + if (!(this instanceof BrotliCompress)) { + return deprecateInstantiation(BrotliCompress, 'DEP0184', opts); + } + ReflectApply(Brotli, this, [opts, BROTLI_ENCODE]); +} +ObjectSetPrototypeOf(BrotliCompress.prototype, Brotli.prototype); +ObjectSetPrototypeOf(BrotliCompress, Brotli); + +function BrotliDecompress(opts) { + if (!(this instanceof BrotliDecompress)) { + return deprecateInstantiation(BrotliDecompress, 'DEP0184', opts); + } + ReflectApply(Brotli, this, [opts, BROTLI_DECODE]); +} +ObjectSetPrototypeOf(BrotliDecompress.prototype, Brotli.prototype); +ObjectSetPrototypeOf(BrotliDecompress, Brotli); + + +const zstdDefaultOpts = { + flush: ZSTD_e_continue, + finishFlush: ZSTD_e_end, + fullFlush: ZSTD_e_flush, +}; +class Zstd extends ZlibBase { + constructor(opts, mode, initParamsArray, maxParam) { + assert(mode === ZSTD_COMPRESS || mode === ZSTD_DECOMPRESS); + + initParamsArray.fill(-1); + if (opts?.params) { + ObjectKeys(opts.params).forEach((origKey) => { + const key = +origKey; + if (NumberIsNaN(key) || key < 0 || key > maxParam || + (initParamsArray[key] | 0) !== -1) { + throw new ERR_ZSTD_INVALID_PARAM(origKey); + } + + const value = opts.params[origKey]; + if (typeof value !== 'number' && typeof value !== 'boolean') { + throw new ERR_INVALID_ARG_TYPE('options.params[key]', + 'number', opts.params[origKey]); + } + initParamsArray[key] = value; + }); + } + + const handle = mode === ZSTD_COMPRESS ? + new binding.ZstdCompress() : new binding.ZstdDecompress(); + + const pledgedSrcSize = opts?.pledgedSrcSize ?? undefined; + + const writeState = new Uint32Array(2); + + handle.init( + initParamsArray, + pledgedSrcSize, + writeState, + processCallback, + opts?.dictionary && isArrayBufferView(opts.dictionary) ? opts.dictionary : undefined, + ); + + super(opts, mode, handle, zstdDefaultOpts); + this._writeState = writeState; + } +} + +const kMaxZstdCParam = MathMax(...ObjectKeys(constants).map( + (key) => (key.startsWith('ZSTD_c_') ? 
+ constants[key] : + 0), +)); + +const zstdInitCParamsArray = new Uint32Array(kMaxZstdCParam + 1); + +class ZstdCompress extends Zstd { + constructor(opts) { + super(opts, ZSTD_COMPRESS, zstdInitCParamsArray, kMaxZstdCParam); + } +} + +const kMaxZstdDParam = MathMax(...ObjectKeys(constants).map( + (key) => (key.startsWith('ZSTD_d_') ? + constants[key] : + 0), +)); + +const zstdInitDParamsArray = new Uint32Array(kMaxZstdDParam + 1); + +class ZstdDecompress extends Zstd { + constructor(opts) { + super(opts, ZSTD_DECOMPRESS, zstdInitDParamsArray, kMaxZstdDParam); + } +} + +function createProperty(ctor) { + return { + __proto__: null, + configurable: true, + enumerable: true, + value: function(options) { + return new ctor(options); + }, + }; +} + +function crc32(data, value = 0) { + if (typeof data !== 'string' && !isArrayBufferView(data)) { + throw new ERR_INVALID_ARG_TYPE('data', ['Buffer', 'TypedArray', 'DataView', 'string'], data); + } + validateUint32(value, 'value'); + return crc32Native(data, value); +} + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(binding.Zlib.prototype, 'jsref', { + __proto__: null, + get() { return this[owner_symbol]; }, + set(v) { return this[owner_symbol] = v; }, +}); + +module.exports = { + crc32, + Deflate, + Inflate, + Gzip, + Gunzip, + DeflateRaw, + InflateRaw, + Unzip, + BrotliCompress, + BrotliDecompress, + ZstdCompress, + ZstdDecompress, + + // Convenience methods. + // compress/decompress a string or buffer in one step. + deflate: createConvenienceMethod(Deflate, false), + deflateSync: createConvenienceMethod(Deflate, true), + gzip: createConvenienceMethod(Gzip, false), + gzipSync: createConvenienceMethod(Gzip, true), + deflateRaw: createConvenienceMethod(DeflateRaw, false), + deflateRawSync: createConvenienceMethod(DeflateRaw, true), + unzip: createConvenienceMethod(Unzip, false), + unzipSync: createConvenienceMethod(Unzip, true), + inflate: createConvenienceMethod(Inflate, false), + inflateSync: createConvenienceMethod(Inflate, true), + gunzip: createConvenienceMethod(Gunzip, false), + gunzipSync: createConvenienceMethod(Gunzip, true), + inflateRaw: createConvenienceMethod(InflateRaw, false), + inflateRawSync: createConvenienceMethod(InflateRaw, true), + brotliCompress: createConvenienceMethod(BrotliCompress, false), + brotliCompressSync: createConvenienceMethod(BrotliCompress, true), + brotliDecompress: createConvenienceMethod(BrotliDecompress, false), + brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true), + zstdCompress: createConvenienceMethod(ZstdCompress, false), + zstdCompressSync: createConvenienceMethod(ZstdCompress, true), + zstdDecompress: createConvenienceMethod(ZstdDecompress, false), + zstdDecompressSync: createConvenienceMethod(ZstdDecompress, true), +}; + +ObjectDefineProperties(module.exports, { + createDeflate: createProperty(Deflate), + createInflate: createProperty(Inflate), + createDeflateRaw: createProperty(DeflateRaw), + createInflateRaw: createProperty(InflateRaw), + createGzip: createProperty(Gzip), + createGunzip: createProperty(Gunzip), + createUnzip: createProperty(Unzip), + createBrotliCompress: createProperty(BrotliCompress), + createBrotliDecompress: createProperty(BrotliDecompress), + createZstdCompress: createProperty(ZstdCompress), + createZstdDecompress: createProperty(ZstdDecompress), + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: 
constants, + }, + codes: { + __proto__: null, + enumerable: true, + writable: false, + value: ObjectFreeze(codes), + }, +}); + +// These should be considered deprecated +// expose all the zlib constants +for (const { 0: key, 1: value } of ObjectEntries(constants)) { + if (key.startsWith('BROTLI')) continue; + ObjectDefineProperty(module.exports, key, { + __proto__: null, + enumerable: false, + value, + writable: false, + }); +} \ No newline at end of file diff --git a/.codesandbox/tasks.json b/.codesandbox/tasks.json new file mode 100644 index 0000000000..2285b1c8c5 --- /dev/null +++ b/.codesandbox/tasks.json @@ -0,0 +1,18 @@ +{ + // These tasks will run in order when initializing your CodeSandbox project. + "setupTasks": [ + { + "command": "pnpm install", + "name": "Installing Dependencies" + } + ], + + // These tasks can be run from CodeSandbox. Running one will open a log in the app. + "tasks": { + "postinstall": { + "name": "postinstall", + "command": "pnpm postinstall", + "runAtStart": false + } + } +} diff --git a/.gitignore b/.gitignore index 3d70248ba2..67aced909f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,6 @@ npm-debug.log* yarn-debug.log* yarn-error.log* -package-lock.json \ No newline at end of file +package-lock.json +# Local Netlify folder +.netlify diff --git a/.idx/dev.nix b/.idx/dev.nix new file mode 100644 index 0000000000..7651aa28d4 --- /dev/null +++ b/.idx/dev.nix @@ -0,0 +1,65 @@ +# To learn more about how to use Nix to configure your environment +# see: https://firebase.google.com/docs/studio/customize-workspace +{ pkgs, ... }: { + # Which nixpkgs channel to use. + channel = "stable-24.05"; # or "unstable" + + # Use https://search.nixos.org/packages to find packages + packages = [ + + # pkgs.go + # pkgs.python311 + # pkgs.python311Packages.pip + # pkgs.nodejs_20 + # pkgs.nodePackages.nodemon + # pkgs.sudo + # pkgs.sudo-rs + # pkgs.doas-sudo-shim + # pkgs.python312Packages.pip + # pkgs.openssh + # pkgs.openshh_hpn + # pkgs.openssh_gssapi + # pkgs.busybox + # pkgs.speech-tools + ]; + + # Sets environment variables in the workspace + env = {}; + idx = { + # Search for the extensions you want on https://open-vsx.org/ and use "publisher.id" + extensions = [ + # "vscodevim.vim" + ]; + + # Enable previews + previews = { + enable = true; + previews = { + # web = { + # # Example: run "npm run dev" with PORT set to IDX's defined port for previews, + # # and show it in IDX's web preview panel + # command = ["npm" "run" "dev"]; + # manager = "web"; + # env = { + # # Environment variables to set for your server + # PORT = "$PORT"; + # }; + # }; + }; + }; + + # Workspace lifecycle hooks + workspace = { + # Runs when a workspace is first created + onCreate = { + # Example: install JS dependencies from NPM + # npm-install = "npm install"; + }; + # Runs when the workspace is (re)started + onStart = { + # Example: start a background task to watch and re-build backend code + # watch-backend = "npm run watch-backend"; + }; + }; + }; +} diff --git a/.package.json.swp b/.package.json.swp new file mode 100644 index 0000000000..55fb00c678 Binary files /dev/null and b/.package.json.swp differ diff --git a/.vscode/$PROFILE b/.vscode/$PROFILE new file mode 100644 index 0000000000..f7069873c1 --- /dev/null +++ b/.vscode/$PROFILE @@ -0,0 +1,6 @@ +echo "function yarn { corepack yarn `$args }" >> $PROFILE +echo "function yarnpkg { corepack yarnpkg `$args }" >> $PROFILE +echo "function pnpm { corepack pnpm `$args }" >> $PROFILE +echo "function pnpx { corepack pnpx `$args }" >> $PROFILE 
+echo "function npm { corepack npm `$args }" >> $PROFILE +echo "function npx { corepack npx `$args }" >> $PROFILE \ No newline at end of file diff --git a/.vscode/.bash_aliases b/.vscode/.bash_aliases new file mode 100644 index 0000000000..e562182892 --- /dev/null +++ b/.vscode/.bash_aliases @@ -0,0 +1,6 @@ +alias yarn="corepack yarn" +alias yarnpkg="corepack yarnpkg" +alias pnpm="corepack pnpm" +alias pnpx="corepack pnpx" +alias npm="corepack npm" +alias npx="corepack npx" \ No newline at end of file diff --git a/.vscode/.gitignore b/.vscode/.gitignore new file mode 100644 index 0000000000..3d70248ba2 --- /dev/null +++ b/.vscode/.gitignore @@ -0,0 +1,15 @@ +node_modules +.DS_Store +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +build + +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +package-lock.json \ No newline at end of file diff --git a/.vscode/App.css b/.vscode/App.css new file mode 100644 index 0000000000..a571b3ae9a --- /dev/null +++ b/.vscode/App.css @@ -0,0 +1,43 @@ +#root { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; + } + + .logo { + height: 6em; + padding: 1.5em; + will-change: filter; + transition: filter 300ms; + } + .logo:hover { + filter: drop-shadow(0 0 2em #646cffaa); + } + .logo.react:hover { + filter: drop-shadow(0 0 2em #61dafbaa); + } + + @keyframes logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } + } + + @media (prefers-reduced-motion: no-preference) { + a:nth-of-type(2) .logo { + animation: logo-spin infinite 20s linear; + } + } + + .card { + padding: 2em; + } + + .read-the-docs { + color: #888; + } + \ No newline at end of file diff --git a/.vscode/App.jsx b/.vscode/App.jsx new file mode 100644 index 0000000000..6ca85721ce --- /dev/null +++ b/.vscode/App.jsx @@ -0,0 +1,69 @@ +:root { + font-family: system-ui, Avenir, Helvetica, Arial, sans-serif; + line-height: 1.5; + font-weight: 400; + + color-scheme: light dark; + color: rgba(255, 255, 255, 0.87); + background-color: #242424; + + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + } + + a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; + } + a:hover { + color: #535bf2; + } + + body { + margin: 0; + display: flex; + place-items: center; + min-width: 320px; + min-height: 100vh; + } + + h1 { + font-size: 3.2em; + line-height: 1.1; + } + + button { + border-radius: 8px; + border: 1px solid transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; + } + button:hover { + border-color: #646cff; + } + button:focus, + button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; + } + + @media (prefers-color-scheme: light) { + :root { + color: #213547; + background-color: #ffffff; + } + a:hover { + color: #747bff; + } + button { + background-color: #f9f9f9; + } + } + \ No newline at end of file diff --git a/.vscode/[ b/.vscode/[ new file mode 100644 index 0000000000..3664692d69 --- /dev/null +++ b/.vscode/[ @@ -0,0 +1 @@ +more is not installed, but available in the following packages, pick one to run it, Ctrl+C to cancel. 
\ No newline at end of file
diff --git a/.vscode/app.js b/.vscode/app.js
new file mode 100644
index 0000000000..d33fe40d85
--- /dev/null
+++ b/.vscode/app.js
@@ -0,0 +1,53 @@
+require('dotenv').config();
+
+var createError = require('http-errors');
+var express = require('express');
+var path = require('path');
+var cookieParser = require('cookie-parser');
+var logger = require('morgan');
+var session = require('express-session');
+var passport = require('passport');
+var SQLiteStore = require('connect-sqlite3')(session);
+var indexRouter = require('./routes/index');
+var authRouter = require('./routes/auth');
+
+var app = express();
+
+app.locals.pluralize = require('pluralize');
+
+// view engine setup
+app.set('views', path.join(__dirname, 'views'));
+app.set('view engine', 'ejs');
+
+app.use(logger('dev'));
+app.use(express.json());
+app.use(express.urlencoded({ extended: false }));
+app.use(cookieParser());
+app.use(express.static(path.join(__dirname, 'public')));
+app.use(session({
+  secret: 'keyboard cat',
+  resave: false,
+  saveUninitialized: false,
+  store: new SQLiteStore({ db: 'sessions.db', dir: './var/db' })
+}));
+// restore login state from the session before mounting the routers
+app.use(passport.authenticate('session'));
+app.use('/', indexRouter);
+app.use('/', authRouter);
+// catch 404 and forward to error handler
+app.use(function(req, res, next) {
+  next(createError(404));
+});
+
+// error handler
+app.use(function(err, req, res, next) {
+  // set locals, only providing error in development
+  res.locals.message = err.message;
+  res.locals.error = req.app.get('env') === 'development' ? err : {};
+
+  // render the error page
+  res.status(err.status || 500);
+  res.render('error');
+});
+
+module.exports = app;
diff --git a/.vscode/auth.js b/.vscode/auth.js
new file mode 100644
index 0000000000..4a302adb10
--- /dev/null
+++ b/.vscode/auth.js
@@ -0,0 +1,73 @@
+var express = require('express');
+var passport = require('passport');
+var FacebookStrategy = require('passport-facebook');
+var db = require('../db');
+var router = express.Router();
+passport.use(new FacebookStrategy({
+  clientID: process.env['FACEBOOK_CLIENT_ID'],
+  clientSecret: process.env['FACEBOOK_CLIENT_SECRET'],
+  callbackURL: '/oauth2/redirect/facebook',
+  state: true
+}, function verify(accessToken, refreshToken, profile, cb) {
+  db.get('SELECT * FROM federated_credentials WHERE provider = ? AND subject = ?', [
+    'https://www.facebook.com',
+    profile.id
+  ], function(err, row) {
+    if (err) { return cb(err); }
+    if (!row) {
+      db.run('INSERT INTO users (name) VALUES (?)', [
+        profile.displayName
+      ], function(err) {
+        if (err) { return cb(err); }
+
+        var id = this.lastID;
+        db.run('INSERT INTO federated_credentials (user_id, provider, subject) VALUES (?, ?, ?)', [
+          id,
+          'https://www.facebook.com',
+          profile.id
+        ], function(err) {
+          if (err) { return cb(err); }
+          var user = {
+            id: id,
+            name: profile.displayName
+          };
+          return cb(null, user);
+        });
+      });
+    } else {
+      db.get('SELECT * FROM users WHERE id = ?', [ row.user_id ], function(err, row) {
+        if (err) { return cb(err); }
+        if (!row) { return cb(null, false); }
+        return cb(null, row);
+      });
+    }
+  });
+}));
+
+passport.serializeUser(function(user, cb) {
+  process.nextTick(function() {
+    cb(null, { id: user.id, username: user.username, name: user.name });
+  });
+});
+
+passport.deserializeUser(function(user, cb) {
+  process.nextTick(function() {
+    return cb(null, user);
+  });
+});
+
+router.get('/login', function(req, res, next) {
+  res.render('login');
+});
+router.get('/login/federated/facebook', passport.authenticate('facebook'));
+router.get('/oauth2/redirect/facebook', passport.authenticate('facebook', {
+  successRedirect: '/',
+  failureRedirect: '/login'
+}));
+router.post('/logout', function(req, res, next) {
+  req.logout(function(err) {
+    if (err) { return next(err); }
+    res.redirect('/');
+  });
+});
+module.exports = router;
\ No newline at end of file
diff --git a/.vscode/edit.jsx b/.vscode/edit.jsx
new file mode 100644
index 0000000000..56b56fd04a
--- /dev/null
+++ b/.vscode/edit.jsx
@@ -0,0 +1,144 @@
+import { Form, useLoaderData } from "react-router-dom";
+
+export default function EditContact() {
+  const { contact } = useLoaderData();
+
+  return (
+    <Form method="post" id="contact-form">
+      <p>
+        <span>Name</span>
+        <input
+          placeholder="First"
+          aria-label="First name"
+          type="text"
+          name="first"
+          defaultValue={contact.first}
+        />