first_commit
This commit is contained in:
83
GTA_P_V2/node_modules/vite-node/dist/chunk-browser.cjs
generated
vendored
Normal file
83
GTA_P_V2/node_modules/vite-node/dist/chunk-browser.cjs
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
'use strict';
|
||||
|
||||
// src/index.ts
|
||||
var f = {
|
||||
reset: [0, 0],
|
||||
bold: [1, 22, "\x1B[22m\x1B[1m"],
|
||||
dim: [2, 22, "\x1B[22m\x1B[2m"],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29],
|
||||
black: [30, 39],
|
||||
red: [31, 39],
|
||||
green: [32, 39],
|
||||
yellow: [33, 39],
|
||||
blue: [34, 39],
|
||||
magenta: [35, 39],
|
||||
cyan: [36, 39],
|
||||
white: [37, 39],
|
||||
gray: [90, 39],
|
||||
bgBlack: [40, 49],
|
||||
bgRed: [41, 49],
|
||||
bgGreen: [42, 49],
|
||||
bgYellow: [43, 49],
|
||||
bgBlue: [44, 49],
|
||||
bgMagenta: [45, 49],
|
||||
bgCyan: [46, 49],
|
||||
bgWhite: [47, 49],
|
||||
blackBright: [90, 39],
|
||||
redBright: [91, 39],
|
||||
greenBright: [92, 39],
|
||||
yellowBright: [93, 39],
|
||||
blueBright: [94, 39],
|
||||
magentaBright: [95, 39],
|
||||
cyanBright: [96, 39],
|
||||
whiteBright: [97, 39],
|
||||
bgBlackBright: [100, 49],
|
||||
bgRedBright: [101, 49],
|
||||
bgGreenBright: [102, 49],
|
||||
bgYellowBright: [103, 49],
|
||||
bgBlueBright: [104, 49],
|
||||
bgMagentaBright: [105, 49],
|
||||
bgCyanBright: [106, 49],
|
||||
bgWhiteBright: [107, 49]
|
||||
}, h = Object.entries(f);
|
||||
function a(n) {
|
||||
return String(n);
|
||||
}
|
||||
a.open = "";
|
||||
a.close = "";
|
||||
function C(n = false) {
|
||||
let e = typeof process != "undefined" ? process : void 0, i = (e == null ? void 0 : e.env) || {}, g = (e == null ? void 0 : e.argv) || [];
|
||||
return !("NO_COLOR" in i || g.includes("--no-color")) && ("FORCE_COLOR" in i || g.includes("--color") || (e == null ? void 0 : e.platform) === "win32" || n && i.TERM !== "dumb" || "CI" in i) || typeof window != "undefined" && !!window.chrome;
|
||||
}
|
||||
function p(n = false) {
|
||||
let e = C(n), i = (r, t, c, o) => {
|
||||
let l = "", s = 0;
|
||||
do
|
||||
l += r.substring(s, o) + c, s = o + t.length, o = r.indexOf(t, s);
|
||||
while (~o);
|
||||
return l + r.substring(s);
|
||||
}, g = (r, t, c = r) => {
|
||||
let o = (l) => {
|
||||
let s = String(l), b = s.indexOf(t, r.length);
|
||||
return ~b ? r + i(s, t, c, b) + t : r + s + t;
|
||||
};
|
||||
return o.open = r, o.close = t, o;
|
||||
}, u = {
|
||||
isColorSupported: e
|
||||
}, d = (r) => `\x1B[${r}m`;
|
||||
for (let [r, t] of h)
|
||||
u[r] = e ? g(
|
||||
d(t[0]),
|
||||
d(t[1]),
|
||||
t[2]
|
||||
) : a;
|
||||
return u;
|
||||
}
|
||||
|
||||
var s = p();
|
||||
|
||||
exports.s = s;
|
||||
81
GTA_P_V2/node_modules/vite-node/dist/chunk-browser.mjs
generated
vendored
Normal file
81
GTA_P_V2/node_modules/vite-node/dist/chunk-browser.mjs
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
// src/index.ts
|
||||
var f = {
|
||||
reset: [0, 0],
|
||||
bold: [1, 22, "\x1B[22m\x1B[1m"],
|
||||
dim: [2, 22, "\x1B[22m\x1B[2m"],
|
||||
italic: [3, 23],
|
||||
underline: [4, 24],
|
||||
inverse: [7, 27],
|
||||
hidden: [8, 28],
|
||||
strikethrough: [9, 29],
|
||||
black: [30, 39],
|
||||
red: [31, 39],
|
||||
green: [32, 39],
|
||||
yellow: [33, 39],
|
||||
blue: [34, 39],
|
||||
magenta: [35, 39],
|
||||
cyan: [36, 39],
|
||||
white: [37, 39],
|
||||
gray: [90, 39],
|
||||
bgBlack: [40, 49],
|
||||
bgRed: [41, 49],
|
||||
bgGreen: [42, 49],
|
||||
bgYellow: [43, 49],
|
||||
bgBlue: [44, 49],
|
||||
bgMagenta: [45, 49],
|
||||
bgCyan: [46, 49],
|
||||
bgWhite: [47, 49],
|
||||
blackBright: [90, 39],
|
||||
redBright: [91, 39],
|
||||
greenBright: [92, 39],
|
||||
yellowBright: [93, 39],
|
||||
blueBright: [94, 39],
|
||||
magentaBright: [95, 39],
|
||||
cyanBright: [96, 39],
|
||||
whiteBright: [97, 39],
|
||||
bgBlackBright: [100, 49],
|
||||
bgRedBright: [101, 49],
|
||||
bgGreenBright: [102, 49],
|
||||
bgYellowBright: [103, 49],
|
||||
bgBlueBright: [104, 49],
|
||||
bgMagentaBright: [105, 49],
|
||||
bgCyanBright: [106, 49],
|
||||
bgWhiteBright: [107, 49]
|
||||
}, h = Object.entries(f);
|
||||
function a(n) {
|
||||
return String(n);
|
||||
}
|
||||
a.open = "";
|
||||
a.close = "";
|
||||
function C(n = false) {
|
||||
let e = typeof process != "undefined" ? process : void 0, i = (e == null ? void 0 : e.env) || {}, g = (e == null ? void 0 : e.argv) || [];
|
||||
return !("NO_COLOR" in i || g.includes("--no-color")) && ("FORCE_COLOR" in i || g.includes("--color") || (e == null ? void 0 : e.platform) === "win32" || n && i.TERM !== "dumb" || "CI" in i) || typeof window != "undefined" && !!window.chrome;
|
||||
}
|
||||
function p(n = false) {
|
||||
let e = C(n), i = (r, t, c, o) => {
|
||||
let l = "", s = 0;
|
||||
do
|
||||
l += r.substring(s, o) + c, s = o + t.length, o = r.indexOf(t, s);
|
||||
while (~o);
|
||||
return l + r.substring(s);
|
||||
}, g = (r, t, c = r) => {
|
||||
let o = (l) => {
|
||||
let s = String(l), b = s.indexOf(t, r.length);
|
||||
return ~b ? r + i(s, t, c, b) + t : r + s + t;
|
||||
};
|
||||
return o.open = r, o.close = t, o;
|
||||
}, u = {
|
||||
isColorSupported: e
|
||||
}, d = (r) => `\x1B[${r}m`;
|
||||
for (let [r, t] of h)
|
||||
u[r] = e ? g(
|
||||
d(t[0]),
|
||||
d(t[1]),
|
||||
t[2]
|
||||
) : a;
|
||||
return u;
|
||||
}
|
||||
|
||||
var s = p();
|
||||
|
||||
export { s };
|
||||
249
GTA_P_V2/node_modules/vite-node/dist/chunk-hmr.cjs
generated
vendored
Normal file
249
GTA_P_V2/node_modules/vite-node/dist/chunk-hmr.cjs
generated
vendored
Normal file
@@ -0,0 +1,249 @@
|
||||
'use strict';
|
||||
|
||||
var node_events = require('node:events');
|
||||
var createDebug = require('debug');
|
||||
var browser = require('./chunk-browser.cjs');
|
||||
var utils = require('./utils.cjs');
|
||||
|
||||
function createHmrEmitter() {
|
||||
const emitter = new node_events.EventEmitter();
|
||||
return emitter;
|
||||
}
|
||||
function viteNodeHmrPlugin() {
|
||||
const emitter = createHmrEmitter();
|
||||
return {
|
||||
name: "vite-node:hmr",
|
||||
config() {
|
||||
// chokidar fsevents is unstable on macos when emitting "ready" event
|
||||
if (process.platform === "darwin" && false);
|
||||
},
|
||||
configureServer(server) {
|
||||
const _send = server.ws.send;
|
||||
server.emitter = emitter;
|
||||
server.ws.send = function(payload) {
|
||||
_send(payload);
|
||||
emitter.emit("message", payload);
|
||||
};
|
||||
// eslint-disable-next-line ts/ban-ts-comment
|
||||
// @ts-ignore Vite 6 compat
|
||||
const environments = server.environments;
|
||||
if (environments) environments.ssr.hot.send = function(payload) {
|
||||
_send(payload);
|
||||
emitter.emit("message", payload);
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const debugHmr = createDebug("vite-node:hmr");
|
||||
const cache = /* @__PURE__ */ new WeakMap();
|
||||
function getCache(runner) {
|
||||
if (!cache.has(runner)) cache.set(runner, {
|
||||
hotModulesMap: /* @__PURE__ */ new Map(),
|
||||
dataMap: /* @__PURE__ */ new Map(),
|
||||
disposeMap: /* @__PURE__ */ new Map(),
|
||||
pruneMap: /* @__PURE__ */ new Map(),
|
||||
customListenersMap: /* @__PURE__ */ new Map(),
|
||||
ctxToListenersMap: /* @__PURE__ */ new Map(),
|
||||
messageBuffer: [],
|
||||
isFirstUpdate: false,
|
||||
pending: false,
|
||||
queued: []
|
||||
});
|
||||
return cache.get(runner);
|
||||
}
|
||||
function sendMessageBuffer(runner, emitter) {
|
||||
const maps = getCache(runner);
|
||||
maps.messageBuffer.forEach((msg) => emitter.emit("custom", msg));
|
||||
maps.messageBuffer.length = 0;
|
||||
}
|
||||
async function reload(runner, files) {
|
||||
// invalidate module cache but not node_modules
|
||||
Array.from(runner.moduleCache.keys()).forEach((fsPath) => {
|
||||
if (!fsPath.includes("node_modules")) runner.moduleCache.delete(fsPath);
|
||||
});
|
||||
return Promise.all(files.map((file) => runner.executeId(file)));
|
||||
}
|
||||
async function notifyListeners(runner, event, data) {
|
||||
const maps = getCache(runner);
|
||||
const cbs = maps.customListenersMap.get(event);
|
||||
if (cbs) await Promise.all(cbs.map((cb) => cb(data)));
|
||||
}
|
||||
async function queueUpdate(runner, p) {
|
||||
const maps = getCache(runner);
|
||||
maps.queued.push(p);
|
||||
if (!maps.pending) {
|
||||
maps.pending = true;
|
||||
await Promise.resolve();
|
||||
maps.pending = false;
|
||||
const loading = [...maps.queued];
|
||||
maps.queued = [];
|
||||
(await Promise.all(loading)).forEach((fn) => fn && fn());
|
||||
}
|
||||
}
|
||||
async function fetchUpdate(runner, { path, acceptedPath }) {
|
||||
path = utils.normalizeRequestId(path);
|
||||
acceptedPath = utils.normalizeRequestId(acceptedPath);
|
||||
const maps = getCache(runner);
|
||||
const mod = maps.hotModulesMap.get(path);
|
||||
if (!mod)
|
||||
// In a code-splitting project,
|
||||
// it is common that the hot-updating module is not loaded yet.
|
||||
// https://github.com/vitejs/vite/issues/721
|
||||
return;
|
||||
const isSelfUpdate = path === acceptedPath;
|
||||
let fetchedModule;
|
||||
// determine the qualified callbacks before we re-import the modules
|
||||
const qualifiedCallbacks = mod.callbacks.filter(({ deps }) => deps.includes(acceptedPath));
|
||||
if (isSelfUpdate || qualifiedCallbacks.length > 0) {
|
||||
const disposer = maps.disposeMap.get(acceptedPath);
|
||||
if (disposer) await disposer(maps.dataMap.get(acceptedPath));
|
||||
try {
|
||||
[fetchedModule] = await reload(runner, [acceptedPath]);
|
||||
} catch (e) {
|
||||
warnFailedFetch(e, acceptedPath);
|
||||
}
|
||||
}
|
||||
return () => {
|
||||
for (const { deps, fn } of qualifiedCallbacks) fn(deps.map((dep) => dep === acceptedPath ? fetchedModule : void 0));
|
||||
const loggedPath = isSelfUpdate ? path : `${acceptedPath} via ${path}`;
|
||||
console.log(`${browser.s.cyan("[vite-node]")} hot updated: ${loggedPath}`);
|
||||
};
|
||||
}
|
||||
function warnFailedFetch(err, path) {
|
||||
if (!(err instanceof Error) || !err.message.match("fetch")) console.error(err);
|
||||
console.error(`[hmr] Failed to reload ${path}. This could be due to syntax errors or importing non-existent modules. (see errors above)`);
|
||||
}
|
||||
async function handleMessage(runner, emitter, files, payload) {
|
||||
const maps = getCache(runner);
|
||||
switch (payload.type) {
|
||||
case "connected":
|
||||
sendMessageBuffer(runner, emitter);
|
||||
break;
|
||||
case "update":
|
||||
await notifyListeners(runner, "vite:beforeUpdate", payload);
|
||||
await Promise.all(payload.updates.map((update) => {
|
||||
if (update.type === "js-update") return queueUpdate(runner, fetchUpdate(runner, update));
|
||||
// css-update
|
||||
console.error(`${browser.s.cyan("[vite-node]")} no support css hmr.}`);
|
||||
return null;
|
||||
}));
|
||||
await notifyListeners(runner, "vite:afterUpdate", payload);
|
||||
break;
|
||||
case "full-reload":
|
||||
await notifyListeners(runner, "vite:beforeFullReload", payload);
|
||||
maps.customListenersMap.delete("vite:beforeFullReload");
|
||||
await reload(runner, files);
|
||||
break;
|
||||
case "custom":
|
||||
await notifyListeners(runner, payload.event, payload.data);
|
||||
break;
|
||||
case "prune":
|
||||
await notifyListeners(runner, "vite:beforePrune", payload);
|
||||
payload.paths.forEach((path) => {
|
||||
const fn = maps.pruneMap.get(path);
|
||||
if (fn) fn(maps.dataMap.get(path));
|
||||
});
|
||||
break;
|
||||
case "error": {
|
||||
await notifyListeners(runner, "vite:error", payload);
|
||||
const err = payload.err;
|
||||
console.error(`${browser.s.cyan("[vite-node]")} Internal Server Error\n${err.message}\n${err.stack}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
function createHotContext(runner, emitter, files, ownerPath) {
|
||||
debugHmr("createHotContext", ownerPath);
|
||||
const maps = getCache(runner);
|
||||
if (!maps.dataMap.has(ownerPath)) maps.dataMap.set(ownerPath, {});
|
||||
// when a file is hot updated, a new context is created
|
||||
// clear its stale callbacks
|
||||
const mod = maps.hotModulesMap.get(ownerPath);
|
||||
if (mod) mod.callbacks = [];
|
||||
const newListeners = /* @__PURE__ */ new Map();
|
||||
maps.ctxToListenersMap.set(ownerPath, newListeners);
|
||||
function acceptDeps(deps, callback = () => {}) {
|
||||
const mod = maps.hotModulesMap.get(ownerPath) || {
|
||||
id: ownerPath,
|
||||
callbacks: []
|
||||
};
|
||||
mod.callbacks.push({
|
||||
deps,
|
||||
fn: callback
|
||||
});
|
||||
maps.hotModulesMap.set(ownerPath, mod);
|
||||
}
|
||||
const hot = {
|
||||
get data() {
|
||||
return maps.dataMap.get(ownerPath);
|
||||
},
|
||||
acceptExports(_, callback) {
|
||||
acceptDeps([ownerPath], callback && (([mod]) => callback(mod)));
|
||||
},
|
||||
accept(deps, callback) {
|
||||
if (typeof deps === "function" || !deps)
|
||||
// self-accept: hot.accept(() => {})
|
||||
acceptDeps([ownerPath], ([mod]) => deps && deps(mod));
|
||||
else if (typeof deps === "string")
|
||||
// explicit deps
|
||||
acceptDeps([deps], ([mod]) => callback && callback(mod));
|
||||
else if (Array.isArray(deps)) acceptDeps(deps, callback);
|
||||
else throw new TypeError("invalid hot.accept() usage.");
|
||||
},
|
||||
dispose(cb) {
|
||||
maps.disposeMap.set(ownerPath, cb);
|
||||
},
|
||||
prune(cb) {
|
||||
maps.pruneMap.set(ownerPath, cb);
|
||||
},
|
||||
invalidate() {
|
||||
notifyListeners(runner, "vite:invalidate", {
|
||||
path: ownerPath,
|
||||
message: void 0,
|
||||
firstInvalidatedBy: ownerPath
|
||||
});
|
||||
return reload(runner, files);
|
||||
},
|
||||
on(event, cb) {
|
||||
const addToMap = (map) => {
|
||||
const existing = map.get(event) || [];
|
||||
existing.push(cb);
|
||||
map.set(event, existing);
|
||||
};
|
||||
addToMap(maps.customListenersMap);
|
||||
addToMap(newListeners);
|
||||
},
|
||||
off(event, cb) {
|
||||
const removeFromMap = (map) => {
|
||||
const existing = map.get(event);
|
||||
if (existing === void 0) return;
|
||||
const pruned = existing.filter((l) => l !== cb);
|
||||
if (pruned.length === 0) {
|
||||
map.delete(event);
|
||||
return;
|
||||
}
|
||||
map.set(event, pruned);
|
||||
};
|
||||
removeFromMap(maps.customListenersMap);
|
||||
removeFromMap(newListeners);
|
||||
},
|
||||
send(event, data) {
|
||||
maps.messageBuffer.push(JSON.stringify({
|
||||
type: "custom",
|
||||
event,
|
||||
data
|
||||
}));
|
||||
sendMessageBuffer(runner, emitter);
|
||||
}
|
||||
};
|
||||
return hot;
|
||||
}
|
||||
|
||||
exports.createHmrEmitter = createHmrEmitter;
|
||||
exports.createHotContext = createHotContext;
|
||||
exports.getCache = getCache;
|
||||
exports.handleMessage = handleMessage;
|
||||
exports.reload = reload;
|
||||
exports.sendMessageBuffer = sendMessageBuffer;
|
||||
exports.viteNodeHmrPlugin = viteNodeHmrPlugin;
|
||||
241
GTA_P_V2/node_modules/vite-node/dist/chunk-hmr.mjs
generated
vendored
Normal file
241
GTA_P_V2/node_modules/vite-node/dist/chunk-hmr.mjs
generated
vendored
Normal file
@@ -0,0 +1,241 @@
|
||||
import { EventEmitter } from 'node:events';
|
||||
import createDebug from 'debug';
|
||||
import { s } from './chunk-browser.mjs';
|
||||
import { normalizeRequestId } from './utils.mjs';
|
||||
|
||||
function createHmrEmitter() {
|
||||
const emitter = new EventEmitter();
|
||||
return emitter;
|
||||
}
|
||||
function viteNodeHmrPlugin() {
|
||||
const emitter = createHmrEmitter();
|
||||
return {
|
||||
name: "vite-node:hmr",
|
||||
config() {
|
||||
// chokidar fsevents is unstable on macos when emitting "ready" event
|
||||
if (process.platform === "darwin" && false);
|
||||
},
|
||||
configureServer(server) {
|
||||
const _send = server.ws.send;
|
||||
server.emitter = emitter;
|
||||
server.ws.send = function(payload) {
|
||||
_send(payload);
|
||||
emitter.emit("message", payload);
|
||||
};
|
||||
// eslint-disable-next-line ts/ban-ts-comment
|
||||
// @ts-ignore Vite 6 compat
|
||||
const environments = server.environments;
|
||||
if (environments) environments.ssr.hot.send = function(payload) {
|
||||
_send(payload);
|
||||
emitter.emit("message", payload);
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const debugHmr = createDebug("vite-node:hmr");
|
||||
const cache = /* @__PURE__ */ new WeakMap();
|
||||
function getCache(runner) {
|
||||
if (!cache.has(runner)) cache.set(runner, {
|
||||
hotModulesMap: /* @__PURE__ */ new Map(),
|
||||
dataMap: /* @__PURE__ */ new Map(),
|
||||
disposeMap: /* @__PURE__ */ new Map(),
|
||||
pruneMap: /* @__PURE__ */ new Map(),
|
||||
customListenersMap: /* @__PURE__ */ new Map(),
|
||||
ctxToListenersMap: /* @__PURE__ */ new Map(),
|
||||
messageBuffer: [],
|
||||
isFirstUpdate: false,
|
||||
pending: false,
|
||||
queued: []
|
||||
});
|
||||
return cache.get(runner);
|
||||
}
|
||||
function sendMessageBuffer(runner, emitter) {
|
||||
const maps = getCache(runner);
|
||||
maps.messageBuffer.forEach((msg) => emitter.emit("custom", msg));
|
||||
maps.messageBuffer.length = 0;
|
||||
}
|
||||
async function reload(runner, files) {
|
||||
// invalidate module cache but not node_modules
|
||||
Array.from(runner.moduleCache.keys()).forEach((fsPath) => {
|
||||
if (!fsPath.includes("node_modules")) runner.moduleCache.delete(fsPath);
|
||||
});
|
||||
return Promise.all(files.map((file) => runner.executeId(file)));
|
||||
}
|
||||
async function notifyListeners(runner, event, data) {
|
||||
const maps = getCache(runner);
|
||||
const cbs = maps.customListenersMap.get(event);
|
||||
if (cbs) await Promise.all(cbs.map((cb) => cb(data)));
|
||||
}
|
||||
async function queueUpdate(runner, p) {
|
||||
const maps = getCache(runner);
|
||||
maps.queued.push(p);
|
||||
if (!maps.pending) {
|
||||
maps.pending = true;
|
||||
await Promise.resolve();
|
||||
maps.pending = false;
|
||||
const loading = [...maps.queued];
|
||||
maps.queued = [];
|
||||
(await Promise.all(loading)).forEach((fn) => fn && fn());
|
||||
}
|
||||
}
|
||||
async function fetchUpdate(runner, { path, acceptedPath }) {
|
||||
path = normalizeRequestId(path);
|
||||
acceptedPath = normalizeRequestId(acceptedPath);
|
||||
const maps = getCache(runner);
|
||||
const mod = maps.hotModulesMap.get(path);
|
||||
if (!mod)
|
||||
// In a code-splitting project,
|
||||
// it is common that the hot-updating module is not loaded yet.
|
||||
// https://github.com/vitejs/vite/issues/721
|
||||
return;
|
||||
const isSelfUpdate = path === acceptedPath;
|
||||
let fetchedModule;
|
||||
// determine the qualified callbacks before we re-import the modules
|
||||
const qualifiedCallbacks = mod.callbacks.filter(({ deps }) => deps.includes(acceptedPath));
|
||||
if (isSelfUpdate || qualifiedCallbacks.length > 0) {
|
||||
const disposer = maps.disposeMap.get(acceptedPath);
|
||||
if (disposer) await disposer(maps.dataMap.get(acceptedPath));
|
||||
try {
|
||||
[fetchedModule] = await reload(runner, [acceptedPath]);
|
||||
} catch (e) {
|
||||
warnFailedFetch(e, acceptedPath);
|
||||
}
|
||||
}
|
||||
return () => {
|
||||
for (const { deps, fn } of qualifiedCallbacks) fn(deps.map((dep) => dep === acceptedPath ? fetchedModule : void 0));
|
||||
const loggedPath = isSelfUpdate ? path : `${acceptedPath} via ${path}`;
|
||||
console.log(`${s.cyan("[vite-node]")} hot updated: ${loggedPath}`);
|
||||
};
|
||||
}
|
||||
function warnFailedFetch(err, path) {
|
||||
if (!(err instanceof Error) || !err.message.match("fetch")) console.error(err);
|
||||
console.error(`[hmr] Failed to reload ${path}. This could be due to syntax errors or importing non-existent modules. (see errors above)`);
|
||||
}
|
||||
async function handleMessage(runner, emitter, files, payload) {
|
||||
const maps = getCache(runner);
|
||||
switch (payload.type) {
|
||||
case "connected":
|
||||
sendMessageBuffer(runner, emitter);
|
||||
break;
|
||||
case "update":
|
||||
await notifyListeners(runner, "vite:beforeUpdate", payload);
|
||||
await Promise.all(payload.updates.map((update) => {
|
||||
if (update.type === "js-update") return queueUpdate(runner, fetchUpdate(runner, update));
|
||||
// css-update
|
||||
console.error(`${s.cyan("[vite-node]")} no support css hmr.}`);
|
||||
return null;
|
||||
}));
|
||||
await notifyListeners(runner, "vite:afterUpdate", payload);
|
||||
break;
|
||||
case "full-reload":
|
||||
await notifyListeners(runner, "vite:beforeFullReload", payload);
|
||||
maps.customListenersMap.delete("vite:beforeFullReload");
|
||||
await reload(runner, files);
|
||||
break;
|
||||
case "custom":
|
||||
await notifyListeners(runner, payload.event, payload.data);
|
||||
break;
|
||||
case "prune":
|
||||
await notifyListeners(runner, "vite:beforePrune", payload);
|
||||
payload.paths.forEach((path) => {
|
||||
const fn = maps.pruneMap.get(path);
|
||||
if (fn) fn(maps.dataMap.get(path));
|
||||
});
|
||||
break;
|
||||
case "error": {
|
||||
await notifyListeners(runner, "vite:error", payload);
|
||||
const err = payload.err;
|
||||
console.error(`${s.cyan("[vite-node]")} Internal Server Error\n${err.message}\n${err.stack}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
function createHotContext(runner, emitter, files, ownerPath) {
|
||||
debugHmr("createHotContext", ownerPath);
|
||||
const maps = getCache(runner);
|
||||
if (!maps.dataMap.has(ownerPath)) maps.dataMap.set(ownerPath, {});
|
||||
// when a file is hot updated, a new context is created
|
||||
// clear its stale callbacks
|
||||
const mod = maps.hotModulesMap.get(ownerPath);
|
||||
if (mod) mod.callbacks = [];
|
||||
const newListeners = /* @__PURE__ */ new Map();
|
||||
maps.ctxToListenersMap.set(ownerPath, newListeners);
|
||||
function acceptDeps(deps, callback = () => {}) {
|
||||
const mod = maps.hotModulesMap.get(ownerPath) || {
|
||||
id: ownerPath,
|
||||
callbacks: []
|
||||
};
|
||||
mod.callbacks.push({
|
||||
deps,
|
||||
fn: callback
|
||||
});
|
||||
maps.hotModulesMap.set(ownerPath, mod);
|
||||
}
|
||||
const hot = {
|
||||
get data() {
|
||||
return maps.dataMap.get(ownerPath);
|
||||
},
|
||||
acceptExports(_, callback) {
|
||||
acceptDeps([ownerPath], callback && (([mod]) => callback(mod)));
|
||||
},
|
||||
accept(deps, callback) {
|
||||
if (typeof deps === "function" || !deps)
|
||||
// self-accept: hot.accept(() => {})
|
||||
acceptDeps([ownerPath], ([mod]) => deps && deps(mod));
|
||||
else if (typeof deps === "string")
|
||||
// explicit deps
|
||||
acceptDeps([deps], ([mod]) => callback && callback(mod));
|
||||
else if (Array.isArray(deps)) acceptDeps(deps, callback);
|
||||
else throw new TypeError("invalid hot.accept() usage.");
|
||||
},
|
||||
dispose(cb) {
|
||||
maps.disposeMap.set(ownerPath, cb);
|
||||
},
|
||||
prune(cb) {
|
||||
maps.pruneMap.set(ownerPath, cb);
|
||||
},
|
||||
invalidate() {
|
||||
notifyListeners(runner, "vite:invalidate", {
|
||||
path: ownerPath,
|
||||
message: void 0,
|
||||
firstInvalidatedBy: ownerPath
|
||||
});
|
||||
return reload(runner, files);
|
||||
},
|
||||
on(event, cb) {
|
||||
const addToMap = (map) => {
|
||||
const existing = map.get(event) || [];
|
||||
existing.push(cb);
|
||||
map.set(event, existing);
|
||||
};
|
||||
addToMap(maps.customListenersMap);
|
||||
addToMap(newListeners);
|
||||
},
|
||||
off(event, cb) {
|
||||
const removeFromMap = (map) => {
|
||||
const existing = map.get(event);
|
||||
if (existing === void 0) return;
|
||||
const pruned = existing.filter((l) => l !== cb);
|
||||
if (pruned.length === 0) {
|
||||
map.delete(event);
|
||||
return;
|
||||
}
|
||||
map.set(event, pruned);
|
||||
};
|
||||
removeFromMap(maps.customListenersMap);
|
||||
removeFromMap(newListeners);
|
||||
},
|
||||
send(event, data) {
|
||||
maps.messageBuffer.push(JSON.stringify({
|
||||
type: "custom",
|
||||
event,
|
||||
data
|
||||
}));
|
||||
sendMessageBuffer(runner, emitter);
|
||||
}
|
||||
};
|
||||
return hot;
|
||||
}
|
||||
|
||||
export { createHotContext as a, createHmrEmitter as c, getCache as g, handleMessage as h, reload as r, sendMessageBuffer as s, viteNodeHmrPlugin as v };
|
||||
135
GTA_P_V2/node_modules/vite-node/dist/cli.cjs
generated
vendored
Normal file
135
GTA_P_V2/node_modules/vite-node/dist/cli.cjs
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
'use strict';
|
||||
|
||||
var path = require('node:path');
|
||||
var cac = require('cac');
|
||||
var browser = require('./chunk-browser.cjs');
|
||||
var vite = require('vite');
|
||||
var client = require('./client.cjs');
|
||||
var hmr = require('./chunk-hmr.cjs');
|
||||
var server = require('./server.cjs');
|
||||
var sourceMap = require('./source-map.cjs');
|
||||
var utils = require('./utils.cjs');
|
||||
require('node:module');
|
||||
require('node:url');
|
||||
require('node:vm');
|
||||
require('debug');
|
||||
require('pathe');
|
||||
require('node:fs');
|
||||
require('node:events');
|
||||
require('node:assert');
|
||||
require('node:perf_hooks');
|
||||
require('es-module-lexer');
|
||||
require('./constants.cjs');
|
||||
|
||||
var version = "3.2.4";
|
||||
|
||||
const cli = cac("vite-node");
|
||||
cli.option("-r, --root <path>", "Use specified root directory").option("-c, --config <path>", "Use specified config file").option("-m, --mode <mode>", "Set env mode").option("-w, --watch", "Restart on file changes, similar to \"nodemon\"").option("--script", "Use vite-node as a script runner").option("--options <options>", "Use specified Vite server options").option("-v, --version", "Output the version number").option("-h, --help", "Display help for command");
|
||||
cli.command("[...files]").allowUnknownOptions().action(run);
|
||||
cli.parse(process.argv, { run: false });
|
||||
if (cli.args.length === 0) cli.runMatchedCommand();
|
||||
else {
|
||||
const i = cli.rawArgs.indexOf(cli.args[0]) + 1;
|
||||
const scriptArgs = cli.rawArgs.slice(i).filter((it) => it !== "--");
|
||||
const executeArgs = [
|
||||
...cli.rawArgs.slice(0, i),
|
||||
"--",
|
||||
...scriptArgs
|
||||
];
|
||||
cli.parse(executeArgs);
|
||||
}
|
||||
async function run(files, options = {}) {
|
||||
var _server$emitter;
|
||||
if (options.script) {
|
||||
files = [files[0]];
|
||||
options = {};
|
||||
process.argv = [
|
||||
process.argv[0],
|
||||
path.resolve(files[0]),
|
||||
...process.argv.slice(2).filter((arg) => arg !== "--script" && arg !== files[0])
|
||||
];
|
||||
} else process.argv = [...process.argv.slice(0, 2), ...options["--"] || []];
|
||||
if (options.version) {
|
||||
cli.version(version);
|
||||
cli.outputVersion();
|
||||
process.exit(0);
|
||||
}
|
||||
if (options.help) {
|
||||
cli.version(version).outputHelp();
|
||||
process.exit(0);
|
||||
}
|
||||
if (!files.length) {
|
||||
console.error(browser.s.red("No files specified."));
|
||||
cli.version(version).outputHelp();
|
||||
process.exit(1);
|
||||
}
|
||||
const serverOptions = options.options ? parseServerOptions(options.options) : {};
|
||||
const server$1 = await vite.createServer({
|
||||
logLevel: "error",
|
||||
configFile: options.config,
|
||||
root: options.root,
|
||||
mode: options.mode,
|
||||
server: {
|
||||
hmr: !!options.watch,
|
||||
watch: options.watch ? void 0 : null
|
||||
},
|
||||
plugins: [options.watch && hmr.viteNodeHmrPlugin()]
|
||||
});
|
||||
if (Number(vite.version.split(".")[0]) < 6) await server$1.pluginContainer.buildStart({});
|
||||
else
|
||||
// directly access client plugin container until https://github.com/vitejs/vite/issues/19607
|
||||
await server$1.environments.client.pluginContainer.buildStart({});
|
||||
const env = vite.loadEnv(server$1.config.mode, server$1.config.envDir, "");
|
||||
for (const key in env) {
|
||||
var _process$env;
|
||||
(_process$env = process.env)[key] ?? (_process$env[key] = env[key]);
|
||||
}
|
||||
const node = new server.ViteNodeServer(server$1, serverOptions);
|
||||
sourceMap.installSourcemapsSupport({ getSourceMap: (source) => node.getSourceMap(source) });
|
||||
const runner = new client.ViteNodeRunner({
|
||||
root: server$1.config.root,
|
||||
base: server$1.config.base,
|
||||
fetchModule(id) {
|
||||
return node.fetchModule(id);
|
||||
},
|
||||
resolveId(id, importer) {
|
||||
return node.resolveId(id, importer);
|
||||
},
|
||||
createHotContext(runner, url) {
|
||||
return hmr.createHotContext(runner, server$1.emitter, files, url);
|
||||
}
|
||||
});
|
||||
// provide the vite define variable in this context
|
||||
await runner.executeId("/@vite/env");
|
||||
for (const file of files) await runner.executeFile(file);
|
||||
if (!options.watch) await server$1.close();
|
||||
(_server$emitter = server$1.emitter) === null || _server$emitter === void 0 || _server$emitter.on("message", (payload) => {
|
||||
hmr.handleMessage(runner, server$1.emitter, files, payload);
|
||||
});
|
||||
if (options.watch) process.on("uncaughtException", (err) => {
|
||||
console.error(browser.s.red("[vite-node] Failed to execute file: \n"), err);
|
||||
});
|
||||
}
|
||||
function parseServerOptions(serverOptions) {
|
||||
var _serverOptions$deps, _serverOptions$deps2, _serverOptions$deps3, _serverOptions$deps4, _serverOptions$deps5, _serverOptions$deps6, _serverOptions$transf, _serverOptions$transf2;
|
||||
const inlineOptions = ((_serverOptions$deps = serverOptions.deps) === null || _serverOptions$deps === void 0 ? void 0 : _serverOptions$deps.inline) === true ? true : utils.toArray((_serverOptions$deps2 = serverOptions.deps) === null || _serverOptions$deps2 === void 0 ? void 0 : _serverOptions$deps2.inline);
|
||||
return {
|
||||
...serverOptions,
|
||||
deps: {
|
||||
...serverOptions.deps,
|
||||
inlineFiles: utils.toArray((_serverOptions$deps3 = serverOptions.deps) === null || _serverOptions$deps3 === void 0 ? void 0 : _serverOptions$deps3.inlineFiles),
|
||||
inline: inlineOptions !== true ? inlineOptions.map((dep) => {
|
||||
return dep.startsWith("/") && dep.endsWith("/") ? new RegExp(dep) : dep;
|
||||
}) : true,
|
||||
external: utils.toArray((_serverOptions$deps4 = serverOptions.deps) === null || _serverOptions$deps4 === void 0 ? void 0 : _serverOptions$deps4.external).map((dep) => {
|
||||
return dep.startsWith("/") && dep.endsWith("/") ? new RegExp(dep) : dep;
|
||||
}),
|
||||
moduleDirectories: ((_serverOptions$deps5 = serverOptions.deps) === null || _serverOptions$deps5 === void 0 ? void 0 : _serverOptions$deps5.moduleDirectories) ? utils.toArray((_serverOptions$deps6 = serverOptions.deps) === null || _serverOptions$deps6 === void 0 ? void 0 : _serverOptions$deps6.moduleDirectories) : void 0
|
||||
},
|
||||
transformMode: {
|
||||
...serverOptions.transformMode,
|
||||
ssr: utils.toArray((_serverOptions$transf = serverOptions.transformMode) === null || _serverOptions$transf === void 0 ? void 0 : _serverOptions$transf.ssr).map((dep) => new RegExp(dep)),
|
||||
web: utils.toArray((_serverOptions$transf2 = serverOptions.transformMode) === null || _serverOptions$transf2 === void 0 ? void 0 : _serverOptions$transf2.web).map((dep) => new RegExp(dep))
|
||||
}
|
||||
};
|
||||
}
|
||||
19
GTA_P_V2/node_modules/vite-node/dist/cli.d.ts
generated
vendored
Normal file
19
GTA_P_V2/node_modules/vite-node/dist/cli.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { V as ViteNodeServerOptions } from './index.d-DGmxD2U7.js';
|
||||
import './trace-mapping.d-DLVdEqOp.js';
|
||||
|
||||
interface CliOptions {
|
||||
"root"?: string;
|
||||
"script"?: boolean;
|
||||
"config"?: string;
|
||||
"mode"?: string;
|
||||
"watch"?: boolean;
|
||||
"options"?: ViteNodeServerOptionsCLI;
|
||||
"version"?: boolean;
|
||||
"help"?: boolean;
|
||||
"--"?: string[];
|
||||
}
|
||||
type Optional<T> = T | undefined;
|
||||
type ComputeViteNodeServerOptionsCLI<T extends Record<string, any>> = { [K in keyof T] : T[K] extends Optional<RegExp[]> ? string | string[] : T[K] extends Optional<(string | RegExp)[]> ? string | string[] : T[K] extends Optional<(string | RegExp)[] | true> ? string | string[] | true : T[K] extends Optional<Record<string, any>> ? ComputeViteNodeServerOptionsCLI<T[K]> : T[K] };
|
||||
type ViteNodeServerOptionsCLI = ComputeViteNodeServerOptionsCLI<ViteNodeServerOptions>;
|
||||
|
||||
export type { CliOptions, ViteNodeServerOptionsCLI };
|
||||
133
GTA_P_V2/node_modules/vite-node/dist/cli.mjs
generated
vendored
Normal file
133
GTA_P_V2/node_modules/vite-node/dist/cli.mjs
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
import { resolve } from 'node:path';
|
||||
import cac from 'cac';
|
||||
import { s } from './chunk-browser.mjs';
|
||||
import { createServer, version as version$1, loadEnv } from 'vite';
|
||||
import { ViteNodeRunner } from './client.mjs';
|
||||
import { v as viteNodeHmrPlugin, a as createHotContext, h as handleMessage } from './chunk-hmr.mjs';
|
||||
import { ViteNodeServer } from './server.mjs';
|
||||
import { installSourcemapsSupport } from './source-map.mjs';
|
||||
import { toArray } from './utils.mjs';
|
||||
import 'node:module';
|
||||
import 'node:url';
|
||||
import 'node:vm';
|
||||
import 'debug';
|
||||
import 'pathe';
|
||||
import 'node:fs';
|
||||
import 'node:events';
|
||||
import 'node:assert';
|
||||
import 'node:perf_hooks';
|
||||
import 'es-module-lexer';
|
||||
import './constants.mjs';
|
||||
|
||||
var version = "3.2.4";
|
||||
|
||||
const cli = cac("vite-node");
|
||||
cli.option("-r, --root <path>", "Use specified root directory").option("-c, --config <path>", "Use specified config file").option("-m, --mode <mode>", "Set env mode").option("-w, --watch", "Restart on file changes, similar to \"nodemon\"").option("--script", "Use vite-node as a script runner").option("--options <options>", "Use specified Vite server options").option("-v, --version", "Output the version number").option("-h, --help", "Display help for command");
|
||||
cli.command("[...files]").allowUnknownOptions().action(run);
|
||||
cli.parse(process.argv, { run: false });
|
||||
if (cli.args.length === 0) cli.runMatchedCommand();
|
||||
else {
|
||||
const i = cli.rawArgs.indexOf(cli.args[0]) + 1;
|
||||
const scriptArgs = cli.rawArgs.slice(i).filter((it) => it !== "--");
|
||||
const executeArgs = [
|
||||
...cli.rawArgs.slice(0, i),
|
||||
"--",
|
||||
...scriptArgs
|
||||
];
|
||||
cli.parse(executeArgs);
|
||||
}
|
||||
async function run(files, options = {}) {
|
||||
var _server$emitter;
|
||||
if (options.script) {
|
||||
files = [files[0]];
|
||||
options = {};
|
||||
process.argv = [
|
||||
process.argv[0],
|
||||
resolve(files[0]),
|
||||
...process.argv.slice(2).filter((arg) => arg !== "--script" && arg !== files[0])
|
||||
];
|
||||
} else process.argv = [...process.argv.slice(0, 2), ...options["--"] || []];
|
||||
if (options.version) {
|
||||
cli.version(version);
|
||||
cli.outputVersion();
|
||||
process.exit(0);
|
||||
}
|
||||
if (options.help) {
|
||||
cli.version(version).outputHelp();
|
||||
process.exit(0);
|
||||
}
|
||||
if (!files.length) {
|
||||
console.error(s.red("No files specified."));
|
||||
cli.version(version).outputHelp();
|
||||
process.exit(1);
|
||||
}
|
||||
const serverOptions = options.options ? parseServerOptions(options.options) : {};
|
||||
const server = await createServer({
|
||||
logLevel: "error",
|
||||
configFile: options.config,
|
||||
root: options.root,
|
||||
mode: options.mode,
|
||||
server: {
|
||||
hmr: !!options.watch,
|
||||
watch: options.watch ? void 0 : null
|
||||
},
|
||||
plugins: [options.watch && viteNodeHmrPlugin()]
|
||||
});
|
||||
if (Number(version$1.split(".")[0]) < 6) await server.pluginContainer.buildStart({});
|
||||
else
|
||||
// directly access client plugin container until https://github.com/vitejs/vite/issues/19607
|
||||
await server.environments.client.pluginContainer.buildStart({});
|
||||
const env = loadEnv(server.config.mode, server.config.envDir, "");
|
||||
for (const key in env) {
|
||||
var _process$env;
|
||||
(_process$env = process.env)[key] ?? (_process$env[key] = env[key]);
|
||||
}
|
||||
const node = new ViteNodeServer(server, serverOptions);
|
||||
installSourcemapsSupport({ getSourceMap: (source) => node.getSourceMap(source) });
|
||||
const runner = new ViteNodeRunner({
|
||||
root: server.config.root,
|
||||
base: server.config.base,
|
||||
fetchModule(id) {
|
||||
return node.fetchModule(id);
|
||||
},
|
||||
resolveId(id, importer) {
|
||||
return node.resolveId(id, importer);
|
||||
},
|
||||
createHotContext(runner, url) {
|
||||
return createHotContext(runner, server.emitter, files, url);
|
||||
}
|
||||
});
|
||||
// provide the vite define variable in this context
|
||||
await runner.executeId("/@vite/env");
|
||||
for (const file of files) await runner.executeFile(file);
|
||||
if (!options.watch) await server.close();
|
||||
(_server$emitter = server.emitter) === null || _server$emitter === void 0 || _server$emitter.on("message", (payload) => {
|
||||
handleMessage(runner, server.emitter, files, payload);
|
||||
});
|
||||
if (options.watch) process.on("uncaughtException", (err) => {
|
||||
console.error(s.red("[vite-node] Failed to execute file: \n"), err);
|
||||
});
|
||||
}
|
||||
function parseServerOptions(serverOptions) {
|
||||
var _serverOptions$deps, _serverOptions$deps2, _serverOptions$deps3, _serverOptions$deps4, _serverOptions$deps5, _serverOptions$deps6, _serverOptions$transf, _serverOptions$transf2;
|
||||
const inlineOptions = ((_serverOptions$deps = serverOptions.deps) === null || _serverOptions$deps === void 0 ? void 0 : _serverOptions$deps.inline) === true ? true : toArray((_serverOptions$deps2 = serverOptions.deps) === null || _serverOptions$deps2 === void 0 ? void 0 : _serverOptions$deps2.inline);
|
||||
return {
|
||||
...serverOptions,
|
||||
deps: {
|
||||
...serverOptions.deps,
|
||||
inlineFiles: toArray((_serverOptions$deps3 = serverOptions.deps) === null || _serverOptions$deps3 === void 0 ? void 0 : _serverOptions$deps3.inlineFiles),
|
||||
inline: inlineOptions !== true ? inlineOptions.map((dep) => {
|
||||
return dep.startsWith("/") && dep.endsWith("/") ? new RegExp(dep) : dep;
|
||||
}) : true,
|
||||
external: toArray((_serverOptions$deps4 = serverOptions.deps) === null || _serverOptions$deps4 === void 0 ? void 0 : _serverOptions$deps4.external).map((dep) => {
|
||||
return dep.startsWith("/") && dep.endsWith("/") ? new RegExp(dep) : dep;
|
||||
}),
|
||||
moduleDirectories: ((_serverOptions$deps5 = serverOptions.deps) === null || _serverOptions$deps5 === void 0 ? void 0 : _serverOptions$deps5.moduleDirectories) ? toArray((_serverOptions$deps6 = serverOptions.deps) === null || _serverOptions$deps6 === void 0 ? void 0 : _serverOptions$deps6.moduleDirectories) : void 0
|
||||
},
|
||||
transformMode: {
|
||||
...serverOptions.transformMode,
|
||||
ssr: toArray((_serverOptions$transf = serverOptions.transformMode) === null || _serverOptions$transf === void 0 ? void 0 : _serverOptions$transf.ssr).map((dep) => new RegExp(dep)),
|
||||
web: toArray((_serverOptions$transf2 = serverOptions.transformMode) === null || _serverOptions$transf2 === void 0 ? void 0 : _serverOptions$transf2.web).map((dep) => new RegExp(dep))
|
||||
}
|
||||
};
|
||||
}
|
||||
511
GTA_P_V2/node_modules/vite-node/dist/client.cjs
generated
vendored
Normal file
511
GTA_P_V2/node_modules/vite-node/dist/client.cjs
generated
vendored
Normal file
@@ -0,0 +1,511 @@
|
||||
'use strict';
|
||||
|
||||
var node_module = require('node:module');
|
||||
var path = require('node:path');
|
||||
var node_url = require('node:url');
|
||||
var vm = require('node:vm');
|
||||
var createDebug = require('debug');
|
||||
var sourceMap = require('./source-map.cjs');
|
||||
var utils = require('./utils.cjs');
|
||||
require('pathe');
|
||||
require('node:fs');
|
||||
|
||||
const { setTimeout, clearTimeout } = globalThis;
|
||||
const debugExecute = createDebug("vite-node:client:execute");
|
||||
const debugNative = createDebug("vite-node:client:native");
|
||||
const clientStub = {
|
||||
injectQuery: (id) => id,
|
||||
createHotContext: () => {
|
||||
return {
|
||||
accept: () => {},
|
||||
prune: () => {},
|
||||
dispose: () => {},
|
||||
decline: () => {},
|
||||
invalidate: () => {},
|
||||
on: () => {},
|
||||
send: () => {}
|
||||
};
|
||||
},
|
||||
updateStyle: () => {},
|
||||
removeStyle: () => {}
|
||||
};
|
||||
const env = utils.createImportMetaEnvProxy();
|
||||
const DEFAULT_REQUEST_STUBS = {
|
||||
"/@vite/client": clientStub,
|
||||
"@vite/client": clientStub
|
||||
};
|
||||
class ModuleCacheMap extends Map {
|
||||
normalizePath(fsPath) {
|
||||
return utils.normalizeModuleId(fsPath);
|
||||
}
|
||||
/**
|
||||
* Assign partial data to the map
|
||||
*/
|
||||
update(fsPath, mod) {
|
||||
fsPath = this.normalizePath(fsPath);
|
||||
if (!super.has(fsPath)) this.setByModuleId(fsPath, mod);
|
||||
else Object.assign(super.get(fsPath), mod);
|
||||
return this;
|
||||
}
|
||||
setByModuleId(modulePath, mod) {
|
||||
return super.set(modulePath, mod);
|
||||
}
|
||||
set(fsPath, mod) {
|
||||
return this.setByModuleId(this.normalizePath(fsPath), mod);
|
||||
}
|
||||
getByModuleId(modulePath) {
|
||||
if (!super.has(modulePath)) this.setByModuleId(modulePath, {});
|
||||
const mod = super.get(modulePath);
|
||||
if (!mod.imports) Object.assign(mod, {
|
||||
imports: /* @__PURE__ */ new Set(),
|
||||
importers: /* @__PURE__ */ new Set()
|
||||
});
|
||||
return mod;
|
||||
}
|
||||
get(fsPath) {
|
||||
return this.getByModuleId(this.normalizePath(fsPath));
|
||||
}
|
||||
deleteByModuleId(modulePath) {
|
||||
return super.delete(modulePath);
|
||||
}
|
||||
delete(fsPath) {
|
||||
return this.deleteByModuleId(this.normalizePath(fsPath));
|
||||
}
|
||||
invalidateModule(mod) {
|
||||
var _mod$importers, _mod$imports;
|
||||
delete mod.evaluated;
|
||||
delete mod.resolving;
|
||||
delete mod.promise;
|
||||
delete mod.exports;
|
||||
(_mod$importers = mod.importers) === null || _mod$importers === void 0 || _mod$importers.clear();
|
||||
(_mod$imports = mod.imports) === null || _mod$imports === void 0 || _mod$imports.clear();
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Invalidate modules that dependent on the given modules, up to the main entry
|
||||
*/
|
||||
invalidateDepTree(ids, invalidated = /* @__PURE__ */ new Set()) {
|
||||
for (const _id of ids) {
|
||||
const id = this.normalizePath(_id);
|
||||
if (invalidated.has(id)) continue;
|
||||
invalidated.add(id);
|
||||
const mod = super.get(id);
|
||||
if (mod === null || mod === void 0 ? void 0 : mod.importers) this.invalidateDepTree(mod.importers, invalidated);
|
||||
super.delete(id);
|
||||
}
|
||||
return invalidated;
|
||||
}
|
||||
/**
|
||||
* Invalidate dependency modules of the given modules, down to the bottom-level dependencies
|
||||
*/
|
||||
invalidateSubDepTree(ids, invalidated = /* @__PURE__ */ new Set()) {
|
||||
for (const _id of ids) {
|
||||
const id = this.normalizePath(_id);
|
||||
if (invalidated.has(id)) continue;
|
||||
invalidated.add(id);
|
||||
const subIds = Array.from(super.entries()).filter(([, mod]) => {
|
||||
var _mod$importers2;
|
||||
return (_mod$importers2 = mod.importers) === null || _mod$importers2 === void 0 ? void 0 : _mod$importers2.has(id);
|
||||
}).map(([key]) => key);
|
||||
if (subIds.length) this.invalidateSubDepTree(subIds, invalidated);
|
||||
super.delete(id);
|
||||
}
|
||||
return invalidated;
|
||||
}
|
||||
/**
|
||||
* Return parsed source map based on inlined source map of the module
|
||||
*/
|
||||
getSourceMap(id) {
|
||||
const cache = this.get(id);
|
||||
if (cache.map) return cache.map;
|
||||
const map = cache.code && sourceMap.extractSourceMap(cache.code);
|
||||
if (map) {
|
||||
cache.map = map;
|
||||
return map;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
class ViteNodeRunner {
|
||||
root;
|
||||
debug;
|
||||
/**
|
||||
* Holds the cache of modules
|
||||
* Keys of the map are filepaths, or plain package names
|
||||
*/
|
||||
moduleCache;
|
||||
/**
|
||||
* Tracks the stack of modules being executed for the purpose of calculating import self-time.
|
||||
*
|
||||
* Note that while in most cases, imports are a linear stack of modules,
|
||||
* this is occasionally not the case, for example when you have parallel top-level dynamic imports like so:
|
||||
*
|
||||
* ```ts
|
||||
* await Promise.all([
|
||||
* import('./module1'),
|
||||
* import('./module2'),
|
||||
* ]);
|
||||
* ```
|
||||
*
|
||||
* In this case, the self time will be reported incorrectly for one of the modules (could go negative).
|
||||
* As top-level awaits with dynamic imports like this are uncommon, we don't handle this case specifically.
|
||||
*/
|
||||
executionStack = [];
|
||||
// `performance` can be mocked, so make sure we're using the original function
|
||||
performanceNow = performance.now.bind(performance);
|
||||
constructor(options) {
|
||||
this.options = options;
|
||||
this.root = options.root ?? process.cwd();
|
||||
this.moduleCache = options.moduleCache ?? new ModuleCacheMap();
|
||||
this.debug = options.debug ?? (typeof process !== "undefined" ? !!process.env.VITE_NODE_DEBUG_RUNNER : false);
|
||||
}
|
||||
async executeFile(file) {
|
||||
const url = `/@fs/${utils.slash(path.resolve(file))}`;
|
||||
return await this.cachedRequest(url, url, []);
|
||||
}
|
||||
async executeId(rawId) {
|
||||
const [id, url] = await this.resolveUrl(rawId);
|
||||
return await this.cachedRequest(id, url, []);
|
||||
}
|
||||
/** @internal */
|
||||
async cachedRequest(id, fsPath, callstack) {
|
||||
const importee = callstack[callstack.length - 1];
|
||||
const mod = this.moduleCache.get(fsPath);
|
||||
const { imports, importers } = mod;
|
||||
if (importee) importers.add(importee);
|
||||
const getStack = () => `stack:\n${[...callstack, fsPath].reverse().map((p) => ` - ${p}`).join("\n")}`;
|
||||
// check circular dependency
|
||||
if (callstack.includes(fsPath) || Array.from(imports.values()).some((i) => importers.has(i))) {
|
||||
if (mod.exports) return mod.exports;
|
||||
}
|
||||
let debugTimer;
|
||||
if (this.debug) debugTimer = setTimeout(() => console.warn(`[vite-node] module ${fsPath} takes over 2s to load.\n${getStack()}`), 2e3);
|
||||
try {
|
||||
// cached module
|
||||
if (mod.promise) return await mod.promise;
|
||||
const promise = this.directRequest(id, fsPath, callstack);
|
||||
Object.assign(mod, {
|
||||
promise,
|
||||
evaluated: false
|
||||
});
|
||||
return await promise;
|
||||
} finally {
|
||||
mod.evaluated = true;
|
||||
if (debugTimer) clearTimeout(debugTimer);
|
||||
}
|
||||
}
|
||||
shouldResolveId(id, _importee) {
|
||||
return !utils.isInternalRequest(id) && !utils.isNodeBuiltin(id) && !id.startsWith("data:");
|
||||
}
|
||||
async _resolveUrl(id, importer) {
|
||||
var _resolved$meta;
|
||||
const dep = utils.normalizeRequestId(id, this.options.base);
|
||||
if (!this.shouldResolveId(dep)) return [dep, dep];
|
||||
const { path, exists } = utils.toFilePath(dep, this.root);
|
||||
if (!this.options.resolveId || exists) return [dep, path];
|
||||
const resolved = await this.options.resolveId(dep, importer);
|
||||
// supported since Vite 5-beta.19
|
||||
if (resolved === null || resolved === void 0 || (_resolved$meta = resolved.meta) === null || _resolved$meta === void 0 || (_resolved$meta = _resolved$meta["vite:alias"]) === null || _resolved$meta === void 0 ? void 0 : _resolved$meta.noResolved) {
|
||||
const error = new Error(`Cannot find module '${id}'${importer ? ` imported from '${importer}'` : ""}.
|
||||
|
||||
- If you rely on tsconfig.json's "paths" to resolve modules, please install "vite-tsconfig-paths" plugin to handle module resolution.
|
||||
- Make sure you don't have relative aliases in your Vitest config. Use absolute paths instead. Read more: https://vitest.dev/guide/common-errors`);
|
||||
Object.defineProperty(error, "code", {
|
||||
value: "ERR_MODULE_NOT_FOUND",
|
||||
enumerable: true
|
||||
});
|
||||
Object.defineProperty(error, Symbol.for("vitest.error.not_found.data"), {
|
||||
value: {
|
||||
id: dep,
|
||||
importer
|
||||
},
|
||||
enumerable: false
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
const resolvedId = resolved ? utils.normalizeRequestId(resolved.id, this.options.base) : dep;
|
||||
return [resolvedId, resolvedId];
|
||||
}
|
||||
async resolveUrl(id, importee) {
|
||||
const resolveKey = `resolve:${id}`;
|
||||
// put info about new import as soon as possible, so we can start tracking it
|
||||
this.moduleCache.setByModuleId(resolveKey, { resolving: true });
|
||||
try {
|
||||
return await this._resolveUrl(id, importee);
|
||||
} finally {
|
||||
this.moduleCache.deleteByModuleId(resolveKey);
|
||||
}
|
||||
}
|
||||
/** @internal */
|
||||
async dependencyRequest(id, fsPath, callstack) {
|
||||
return await this.cachedRequest(id, fsPath, callstack);
|
||||
}
|
||||
async _fetchModule(id, importer) {
|
||||
try {
|
||||
return await this.options.fetchModule(id);
|
||||
} catch (cause) {
|
||||
// rethrow vite error if it cannot load the module because it's not resolved
|
||||
if (typeof cause === "object" && cause.code === "ERR_LOAD_URL" || typeof (cause === null || cause === void 0 ? void 0 : cause.message) === "string" && cause.message.includes("Failed to load url")) {
|
||||
const error = new Error(`Cannot find ${utils.isBareImport(id) ? "package" : "module"} '${id}'${importer ? ` imported from '${importer}'` : ""}`, { cause });
|
||||
error.code = "ERR_MODULE_NOT_FOUND";
|
||||
throw error;
|
||||
}
|
||||
throw cause;
|
||||
}
|
||||
}
|
||||
/** @internal */
|
||||
async directRequest(id, fsPath, _callstack) {
|
||||
const moduleId = utils.normalizeModuleId(fsPath);
|
||||
const callstack = [..._callstack, moduleId];
|
||||
const mod = this.moduleCache.getByModuleId(moduleId);
|
||||
const request = async (dep) => {
|
||||
const [id, depFsPath] = await this.resolveUrl(String(dep), fsPath);
|
||||
const depMod = this.moduleCache.getByModuleId(depFsPath);
|
||||
depMod.importers.add(moduleId);
|
||||
mod.imports.add(depFsPath);
|
||||
return this.dependencyRequest(id, depFsPath, callstack);
|
||||
};
|
||||
const requestStubs = this.options.requestStubs || DEFAULT_REQUEST_STUBS;
|
||||
if (id in requestStubs) return requestStubs[id];
|
||||
let { code: transformed, externalize } = await this._fetchModule(id, callstack[callstack.length - 2]);
|
||||
if (externalize) {
|
||||
debugNative(externalize);
|
||||
const exports = await this.interopedImport(externalize);
|
||||
mod.exports = exports;
|
||||
return exports;
|
||||
}
|
||||
if (transformed == null) throw new Error(`[vite-node] Failed to load "${id}" imported from ${callstack[callstack.length - 2]}`);
|
||||
const { Object, Reflect, Symbol } = this.getContextPrimitives();
|
||||
const modulePath = utils.cleanUrl(moduleId);
|
||||
// disambiguate the `<UNIT>:/` on windows: see nodejs/node#31710
|
||||
const href = node_url.pathToFileURL(modulePath).href;
|
||||
const __filename = node_url.fileURLToPath(href);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const meta = {
|
||||
url: href,
|
||||
env,
|
||||
filename: __filename,
|
||||
dirname: __dirname
|
||||
};
|
||||
const exports = Object.create(null);
|
||||
Object.defineProperty(exports, Symbol.toStringTag, {
|
||||
value: "Module",
|
||||
enumerable: false,
|
||||
configurable: false
|
||||
});
|
||||
const SYMBOL_NOT_DEFINED = Symbol("not defined");
|
||||
let moduleExports = SYMBOL_NOT_DEFINED;
|
||||
// this proxy is triggered only on exports.{name} and module.exports access
|
||||
// inside the module itself. imported module is always "exports"
|
||||
const cjsExports = new Proxy(exports, {
|
||||
get: (target, p, receiver) => {
|
||||
if (Reflect.has(target, p)) return Reflect.get(target, p, receiver);
|
||||
return Reflect.get(Object.prototype, p, receiver);
|
||||
},
|
||||
getPrototypeOf: () => Object.prototype,
|
||||
set: (_, p, value) => {
|
||||
// treat "module.exports =" the same as "exports.default =" to not have nested "default.default",
|
||||
// so "exports.default" becomes the actual module
|
||||
if (p === "default" && this.shouldInterop(modulePath, { default: value }) && cjsExports !== value) {
|
||||
exportAll(cjsExports, value);
|
||||
exports.default = value;
|
||||
return true;
|
||||
}
|
||||
if (!Reflect.has(exports, "default")) exports.default = {};
|
||||
// returns undefined, when accessing named exports, if default is not an object
|
||||
// but is still present inside hasOwnKeys, this is Node behaviour for CJS
|
||||
if (moduleExports !== SYMBOL_NOT_DEFINED && utils.isPrimitive(moduleExports)) {
|
||||
defineExport(exports, p, () => void 0);
|
||||
return true;
|
||||
}
|
||||
if (!utils.isPrimitive(exports.default)) exports.default[p] = value;
|
||||
if (p !== "default") defineExport(exports, p, () => value);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
Object.assign(mod, {
|
||||
code: transformed,
|
||||
exports
|
||||
});
|
||||
const moduleProxy = {
|
||||
set exports(value) {
|
||||
exportAll(cjsExports, value);
|
||||
exports.default = value;
|
||||
moduleExports = value;
|
||||
},
|
||||
get exports() {
|
||||
return cjsExports;
|
||||
}
|
||||
};
|
||||
// Vite hot context
|
||||
let hotContext;
|
||||
if (this.options.createHotContext) Object.defineProperty(meta, "hot", {
|
||||
enumerable: true,
|
||||
get: () => {
|
||||
var _this$options$createH, _this$options;
|
||||
hotContext || (hotContext = (_this$options$createH = (_this$options = this.options).createHotContext) === null || _this$options$createH === void 0 ? void 0 : _this$options$createH.call(_this$options, this, moduleId));
|
||||
return hotContext;
|
||||
},
|
||||
set: (value) => {
|
||||
hotContext = value;
|
||||
}
|
||||
});
|
||||
// Be careful when changing this
|
||||
// changing context will change amount of code added on line :114 (vm.runInThisContext)
|
||||
// this messes up sourcemaps for coverage
|
||||
// adjust `WRAPPER_LENGTH` variable in packages/coverage-v8/src/provider.ts if you do change this
|
||||
const context = this.prepareContext({
|
||||
__vite_ssr_import__: request,
|
||||
__vite_ssr_dynamic_import__: request,
|
||||
__vite_ssr_exports__: exports,
|
||||
__vite_ssr_exportAll__: (obj) => exportAll(exports, obj),
|
||||
__vite_ssr_exportName__: (name, getter) => Object.defineProperty(exports, name, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
get: getter
|
||||
}),
|
||||
__vite_ssr_import_meta__: meta,
|
||||
require: node_module.createRequire(href),
|
||||
exports: cjsExports,
|
||||
module: moduleProxy,
|
||||
__filename,
|
||||
__dirname
|
||||
});
|
||||
debugExecute(__filename);
|
||||
// remove shebang
|
||||
if (transformed[0] === "#") transformed = transformed.replace(/^#!.*/, (s) => " ".repeat(s.length));
|
||||
await this.runModule(context, transformed);
|
||||
return exports;
|
||||
}
|
||||
getContextPrimitives() {
|
||||
return {
|
||||
Object,
|
||||
Reflect,
|
||||
Symbol
|
||||
};
|
||||
}
|
||||
async runModule(context, transformed) {
|
||||
// add 'use strict' since ESM enables it by default
|
||||
const codeDefinition = `'use strict';async (${Object.keys(context).join(",")})=>{{`;
|
||||
const code = `${codeDefinition}${transformed}\n}}`;
|
||||
const options = {
|
||||
filename: context.__filename,
|
||||
lineOffset: 0,
|
||||
columnOffset: -codeDefinition.length
|
||||
};
|
||||
const finishModuleExecutionInfo = this.startCalculateModuleExecutionInfo(options.filename, codeDefinition.length);
|
||||
try {
|
||||
const fn = vm.runInThisContext(code, options);
|
||||
await fn(...Object.values(context));
|
||||
} finally {
|
||||
var _this$options$moduleE;
|
||||
(_this$options$moduleE = this.options.moduleExecutionInfo) === null || _this$options$moduleE === void 0 || _this$options$moduleE.set(options.filename, finishModuleExecutionInfo());
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Starts calculating the module execution info such as the total duration and self time spent on executing the module.
|
||||
* Returns a function to call once the module has finished executing.
|
||||
*/
|
||||
startCalculateModuleExecutionInfo(filename, startOffset) {
|
||||
const startTime = this.performanceNow();
|
||||
this.executionStack.push({
|
||||
filename,
|
||||
startTime,
|
||||
subImportTime: 0
|
||||
});
|
||||
return () => {
|
||||
const duration = this.performanceNow() - startTime;
|
||||
const currentExecution = this.executionStack.pop();
|
||||
if (currentExecution == null) throw new Error("Execution stack is empty, this should never happen");
|
||||
const selfTime = duration - currentExecution.subImportTime;
|
||||
if (this.executionStack.length > 0) this.executionStack.at(-1).subImportTime += duration;
|
||||
return {
|
||||
startOffset,
|
||||
duration,
|
||||
selfTime
|
||||
};
|
||||
};
|
||||
}
|
||||
prepareContext(context) {
|
||||
return context;
|
||||
}
|
||||
/**
|
||||
* Define if a module should be interop-ed
|
||||
* This function mostly for the ability to override by subclass
|
||||
*/
|
||||
shouldInterop(path, mod) {
|
||||
if (this.options.interopDefault === false) return false;
|
||||
// never interop ESM modules
|
||||
// TODO: should also skip for `.js` with `type="module"`
|
||||
return !path.endsWith(".mjs") && "default" in mod;
|
||||
}
|
||||
importExternalModule(path) {
|
||||
return import(
|
||||
/* @vite-ignore */
|
||||
path
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Import a module and interop it
|
||||
*/
|
||||
async interopedImport(path) {
|
||||
const importedModule = await this.importExternalModule(path);
|
||||
if (!this.shouldInterop(path, importedModule)) return importedModule;
|
||||
const { mod, defaultExport } = interopModule(importedModule);
|
||||
return new Proxy(mod, {
|
||||
get(mod, prop) {
|
||||
if (prop === "default") return defaultExport;
|
||||
return mod[prop] ?? (defaultExport === null || defaultExport === void 0 ? void 0 : defaultExport[prop]);
|
||||
},
|
||||
has(mod, prop) {
|
||||
if (prop === "default") return defaultExport !== void 0;
|
||||
return prop in mod || defaultExport && prop in defaultExport;
|
||||
},
|
||||
getOwnPropertyDescriptor(mod, prop) {
|
||||
const descriptor = Reflect.getOwnPropertyDescriptor(mod, prop);
|
||||
if (descriptor) return descriptor;
|
||||
if (prop === "default" && defaultExport !== void 0) return {
|
||||
value: defaultExport,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
function interopModule(mod) {
|
||||
if (utils.isPrimitive(mod)) return {
|
||||
mod: { default: mod },
|
||||
defaultExport: mod
|
||||
};
|
||||
let defaultExport = "default" in mod ? mod.default : mod;
|
||||
if (!utils.isPrimitive(defaultExport) && "__esModule" in defaultExport) {
|
||||
mod = defaultExport;
|
||||
if ("default" in defaultExport) defaultExport = defaultExport.default;
|
||||
}
|
||||
return {
|
||||
mod,
|
||||
defaultExport
|
||||
};
|
||||
}
|
||||
// keep consistency with Vite on how exports are defined
|
||||
function defineExport(exports, key, value) {
|
||||
Object.defineProperty(exports, key, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
get: value
|
||||
});
|
||||
}
|
||||
function exportAll(exports, sourceModule) {
|
||||
// #1120 when a module exports itself it causes
|
||||
// call stack error
|
||||
if (exports === sourceModule) return;
|
||||
if (utils.isPrimitive(sourceModule) || Array.isArray(sourceModule) || sourceModule instanceof Promise) return;
|
||||
for (const key in sourceModule) if (key !== "default" && !(key in exports)) try {
|
||||
defineExport(exports, key, () => sourceModule[key]);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
exports.DEFAULT_REQUEST_STUBS = DEFAULT_REQUEST_STUBS;
|
||||
exports.ModuleCacheMap = ModuleCacheMap;
|
||||
exports.ViteNodeRunner = ViteNodeRunner;
|
||||
2
GTA_P_V2/node_modules/vite-node/dist/client.d.ts
generated
vendored
Normal file
2
GTA_P_V2/node_modules/vite-node/dist/client.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import './trace-mapping.d-DLVdEqOp.js';
|
||||
export { e as DEFAULT_REQUEST_STUBS, M as ModuleCacheMap, f as ModuleExecutionInfo, g as ModuleExecutionInfoEntry, a as ViteNodeRunner } from './index.d-DGmxD2U7.js';
|
||||
507
GTA_P_V2/node_modules/vite-node/dist/client.mjs
generated
vendored
Normal file
507
GTA_P_V2/node_modules/vite-node/dist/client.mjs
generated
vendored
Normal file
@@ -0,0 +1,507 @@
|
||||
import { createRequire } from 'node:module';
|
||||
import { resolve, dirname } from 'node:path';
|
||||
import { pathToFileURL, fileURLToPath } from 'node:url';
|
||||
import vm from 'node:vm';
|
||||
import createDebug from 'debug';
|
||||
import { extractSourceMap } from './source-map.mjs';
|
||||
import { createImportMetaEnvProxy, slash, isInternalRequest, isNodeBuiltin, normalizeRequestId, toFilePath, normalizeModuleId, cleanUrl, isPrimitive, isBareImport } from './utils.mjs';
|
||||
import 'pathe';
|
||||
import 'node:fs';
|
||||
|
||||
const { setTimeout, clearTimeout } = globalThis;
|
||||
const debugExecute = createDebug("vite-node:client:execute");
|
||||
const debugNative = createDebug("vite-node:client:native");
|
||||
const clientStub = {
|
||||
injectQuery: (id) => id,
|
||||
createHotContext: () => {
|
||||
return {
|
||||
accept: () => {},
|
||||
prune: () => {},
|
||||
dispose: () => {},
|
||||
decline: () => {},
|
||||
invalidate: () => {},
|
||||
on: () => {},
|
||||
send: () => {}
|
||||
};
|
||||
},
|
||||
updateStyle: () => {},
|
||||
removeStyle: () => {}
|
||||
};
|
||||
const env = createImportMetaEnvProxy();
|
||||
const DEFAULT_REQUEST_STUBS = {
|
||||
"/@vite/client": clientStub,
|
||||
"@vite/client": clientStub
|
||||
};
|
||||
class ModuleCacheMap extends Map {
|
||||
normalizePath(fsPath) {
|
||||
return normalizeModuleId(fsPath);
|
||||
}
|
||||
/**
|
||||
* Assign partial data to the map
|
||||
*/
|
||||
update(fsPath, mod) {
|
||||
fsPath = this.normalizePath(fsPath);
|
||||
if (!super.has(fsPath)) this.setByModuleId(fsPath, mod);
|
||||
else Object.assign(super.get(fsPath), mod);
|
||||
return this;
|
||||
}
|
||||
setByModuleId(modulePath, mod) {
|
||||
return super.set(modulePath, mod);
|
||||
}
|
||||
set(fsPath, mod) {
|
||||
return this.setByModuleId(this.normalizePath(fsPath), mod);
|
||||
}
|
||||
getByModuleId(modulePath) {
|
||||
if (!super.has(modulePath)) this.setByModuleId(modulePath, {});
|
||||
const mod = super.get(modulePath);
|
||||
if (!mod.imports) Object.assign(mod, {
|
||||
imports: /* @__PURE__ */ new Set(),
|
||||
importers: /* @__PURE__ */ new Set()
|
||||
});
|
||||
return mod;
|
||||
}
|
||||
get(fsPath) {
|
||||
return this.getByModuleId(this.normalizePath(fsPath));
|
||||
}
|
||||
deleteByModuleId(modulePath) {
|
||||
return super.delete(modulePath);
|
||||
}
|
||||
delete(fsPath) {
|
||||
return this.deleteByModuleId(this.normalizePath(fsPath));
|
||||
}
|
||||
invalidateModule(mod) {
|
||||
var _mod$importers, _mod$imports;
|
||||
delete mod.evaluated;
|
||||
delete mod.resolving;
|
||||
delete mod.promise;
|
||||
delete mod.exports;
|
||||
(_mod$importers = mod.importers) === null || _mod$importers === void 0 || _mod$importers.clear();
|
||||
(_mod$imports = mod.imports) === null || _mod$imports === void 0 || _mod$imports.clear();
|
||||
return true;
|
||||
}
|
||||
/**
|
||||
* Invalidate modules that dependent on the given modules, up to the main entry
|
||||
*/
|
||||
invalidateDepTree(ids, invalidated = /* @__PURE__ */ new Set()) {
|
||||
for (const _id of ids) {
|
||||
const id = this.normalizePath(_id);
|
||||
if (invalidated.has(id)) continue;
|
||||
invalidated.add(id);
|
||||
const mod = super.get(id);
|
||||
if (mod === null || mod === void 0 ? void 0 : mod.importers) this.invalidateDepTree(mod.importers, invalidated);
|
||||
super.delete(id);
|
||||
}
|
||||
return invalidated;
|
||||
}
|
||||
/**
|
||||
* Invalidate dependency modules of the given modules, down to the bottom-level dependencies
|
||||
*/
|
||||
invalidateSubDepTree(ids, invalidated = /* @__PURE__ */ new Set()) {
|
||||
for (const _id of ids) {
|
||||
const id = this.normalizePath(_id);
|
||||
if (invalidated.has(id)) continue;
|
||||
invalidated.add(id);
|
||||
const subIds = Array.from(super.entries()).filter(([, mod]) => {
|
||||
var _mod$importers2;
|
||||
return (_mod$importers2 = mod.importers) === null || _mod$importers2 === void 0 ? void 0 : _mod$importers2.has(id);
|
||||
}).map(([key]) => key);
|
||||
if (subIds.length) this.invalidateSubDepTree(subIds, invalidated);
|
||||
super.delete(id);
|
||||
}
|
||||
return invalidated;
|
||||
}
|
||||
/**
|
||||
* Return parsed source map based on inlined source map of the module
|
||||
*/
|
||||
getSourceMap(id) {
|
||||
const cache = this.get(id);
|
||||
if (cache.map) return cache.map;
|
||||
const map = cache.code && extractSourceMap(cache.code);
|
||||
if (map) {
|
||||
cache.map = map;
|
||||
return map;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
class ViteNodeRunner {
|
||||
root;
|
||||
debug;
|
||||
/**
|
||||
* Holds the cache of modules
|
||||
* Keys of the map are filepaths, or plain package names
|
||||
*/
|
||||
moduleCache;
|
||||
/**
|
||||
* Tracks the stack of modules being executed for the purpose of calculating import self-time.
|
||||
*
|
||||
* Note that while in most cases, imports are a linear stack of modules,
|
||||
* this is occasionally not the case, for example when you have parallel top-level dynamic imports like so:
|
||||
*
|
||||
* ```ts
|
||||
* await Promise.all([
|
||||
* import('./module1'),
|
||||
* import('./module2'),
|
||||
* ]);
|
||||
* ```
|
||||
*
|
||||
* In this case, the self time will be reported incorrectly for one of the modules (could go negative).
|
||||
* As top-level awaits with dynamic imports like this are uncommon, we don't handle this case specifically.
|
||||
*/
|
||||
executionStack = [];
|
||||
// `performance` can be mocked, so make sure we're using the original function
|
||||
performanceNow = performance.now.bind(performance);
|
||||
constructor(options) {
|
||||
this.options = options;
|
||||
this.root = options.root ?? process.cwd();
|
||||
this.moduleCache = options.moduleCache ?? new ModuleCacheMap();
|
||||
this.debug = options.debug ?? (typeof process !== "undefined" ? !!process.env.VITE_NODE_DEBUG_RUNNER : false);
|
||||
}
|
||||
async executeFile(file) {
|
||||
const url = `/@fs/${slash(resolve(file))}`;
|
||||
return await this.cachedRequest(url, url, []);
|
||||
}
|
||||
async executeId(rawId) {
|
||||
const [id, url] = await this.resolveUrl(rawId);
|
||||
return await this.cachedRequest(id, url, []);
|
||||
}
|
||||
/** @internal */
|
||||
async cachedRequest(id, fsPath, callstack) {
|
||||
const importee = callstack[callstack.length - 1];
|
||||
const mod = this.moduleCache.get(fsPath);
|
||||
const { imports, importers } = mod;
|
||||
if (importee) importers.add(importee);
|
||||
const getStack = () => `stack:\n${[...callstack, fsPath].reverse().map((p) => ` - ${p}`).join("\n")}`;
|
||||
// check circular dependency
|
||||
if (callstack.includes(fsPath) || Array.from(imports.values()).some((i) => importers.has(i))) {
|
||||
if (mod.exports) return mod.exports;
|
||||
}
|
||||
let debugTimer;
|
||||
if (this.debug) debugTimer = setTimeout(() => console.warn(`[vite-node] module ${fsPath} takes over 2s to load.\n${getStack()}`), 2e3);
|
||||
try {
|
||||
// cached module
|
||||
if (mod.promise) return await mod.promise;
|
||||
const promise = this.directRequest(id, fsPath, callstack);
|
||||
Object.assign(mod, {
|
||||
promise,
|
||||
evaluated: false
|
||||
});
|
||||
return await promise;
|
||||
} finally {
|
||||
mod.evaluated = true;
|
||||
if (debugTimer) clearTimeout(debugTimer);
|
||||
}
|
||||
}
|
||||
shouldResolveId(id, _importee) {
|
||||
return !isInternalRequest(id) && !isNodeBuiltin(id) && !id.startsWith("data:");
|
||||
}
|
||||
async _resolveUrl(id, importer) {
|
||||
var _resolved$meta;
|
||||
const dep = normalizeRequestId(id, this.options.base);
|
||||
if (!this.shouldResolveId(dep)) return [dep, dep];
|
||||
const { path, exists } = toFilePath(dep, this.root);
|
||||
if (!this.options.resolveId || exists) return [dep, path];
|
||||
const resolved = await this.options.resolveId(dep, importer);
|
||||
// supported since Vite 5-beta.19
|
||||
if (resolved === null || resolved === void 0 || (_resolved$meta = resolved.meta) === null || _resolved$meta === void 0 || (_resolved$meta = _resolved$meta["vite:alias"]) === null || _resolved$meta === void 0 ? void 0 : _resolved$meta.noResolved) {
|
||||
const error = new Error(`Cannot find module '${id}'${importer ? ` imported from '${importer}'` : ""}.
|
||||
|
||||
- If you rely on tsconfig.json's "paths" to resolve modules, please install "vite-tsconfig-paths" plugin to handle module resolution.
|
||||
- Make sure you don't have relative aliases in your Vitest config. Use absolute paths instead. Read more: https://vitest.dev/guide/common-errors`);
|
||||
Object.defineProperty(error, "code", {
|
||||
value: "ERR_MODULE_NOT_FOUND",
|
||||
enumerable: true
|
||||
});
|
||||
Object.defineProperty(error, Symbol.for("vitest.error.not_found.data"), {
|
||||
value: {
|
||||
id: dep,
|
||||
importer
|
||||
},
|
||||
enumerable: false
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
const resolvedId = resolved ? normalizeRequestId(resolved.id, this.options.base) : dep;
|
||||
return [resolvedId, resolvedId];
|
||||
}
|
||||
async resolveUrl(id, importee) {
|
||||
const resolveKey = `resolve:${id}`;
|
||||
// put info about new import as soon as possible, so we can start tracking it
|
||||
this.moduleCache.setByModuleId(resolveKey, { resolving: true });
|
||||
try {
|
||||
return await this._resolveUrl(id, importee);
|
||||
} finally {
|
||||
this.moduleCache.deleteByModuleId(resolveKey);
|
||||
}
|
||||
}
|
||||
/** @internal */
|
||||
async dependencyRequest(id, fsPath, callstack) {
|
||||
return await this.cachedRequest(id, fsPath, callstack);
|
||||
}
|
||||
async _fetchModule(id, importer) {
|
||||
try {
|
||||
return await this.options.fetchModule(id);
|
||||
} catch (cause) {
|
||||
// rethrow vite error if it cannot load the module because it's not resolved
|
||||
if (typeof cause === "object" && cause.code === "ERR_LOAD_URL" || typeof (cause === null || cause === void 0 ? void 0 : cause.message) === "string" && cause.message.includes("Failed to load url")) {
|
||||
const error = new Error(`Cannot find ${isBareImport(id) ? "package" : "module"} '${id}'${importer ? ` imported from '${importer}'` : ""}`, { cause });
|
||||
error.code = "ERR_MODULE_NOT_FOUND";
|
||||
throw error;
|
||||
}
|
||||
throw cause;
|
||||
}
|
||||
}
|
||||
/** @internal */
|
||||
async directRequest(id, fsPath, _callstack) {
|
||||
const moduleId = normalizeModuleId(fsPath);
|
||||
const callstack = [..._callstack, moduleId];
|
||||
const mod = this.moduleCache.getByModuleId(moduleId);
|
||||
const request = async (dep) => {
|
||||
const [id, depFsPath] = await this.resolveUrl(String(dep), fsPath);
|
||||
const depMod = this.moduleCache.getByModuleId(depFsPath);
|
||||
depMod.importers.add(moduleId);
|
||||
mod.imports.add(depFsPath);
|
||||
return this.dependencyRequest(id, depFsPath, callstack);
|
||||
};
|
||||
const requestStubs = this.options.requestStubs || DEFAULT_REQUEST_STUBS;
|
||||
if (id in requestStubs) return requestStubs[id];
|
||||
let { code: transformed, externalize } = await this._fetchModule(id, callstack[callstack.length - 2]);
|
||||
if (externalize) {
|
||||
debugNative(externalize);
|
||||
const exports = await this.interopedImport(externalize);
|
||||
mod.exports = exports;
|
||||
return exports;
|
||||
}
|
||||
if (transformed == null) throw new Error(`[vite-node] Failed to load "${id}" imported from ${callstack[callstack.length - 2]}`);
|
||||
const { Object, Reflect, Symbol } = this.getContextPrimitives();
|
||||
const modulePath = cleanUrl(moduleId);
|
||||
// disambiguate the `<UNIT>:/` on windows: see nodejs/node#31710
|
||||
const href = pathToFileURL(modulePath).href;
|
||||
const __filename = fileURLToPath(href);
|
||||
const __dirname = dirname(__filename);
|
||||
const meta = {
|
||||
url: href,
|
||||
env,
|
||||
filename: __filename,
|
||||
dirname: __dirname
|
||||
};
|
||||
const exports = Object.create(null);
|
||||
Object.defineProperty(exports, Symbol.toStringTag, {
|
||||
value: "Module",
|
||||
enumerable: false,
|
||||
configurable: false
|
||||
});
|
||||
const SYMBOL_NOT_DEFINED = Symbol("not defined");
|
||||
let moduleExports = SYMBOL_NOT_DEFINED;
|
||||
// this proxy is triggered only on exports.{name} and module.exports access
|
||||
// inside the module itself. imported module is always "exports"
|
||||
const cjsExports = new Proxy(exports, {
|
||||
get: (target, p, receiver) => {
|
||||
if (Reflect.has(target, p)) return Reflect.get(target, p, receiver);
|
||||
return Reflect.get(Object.prototype, p, receiver);
|
||||
},
|
||||
getPrototypeOf: () => Object.prototype,
|
||||
set: (_, p, value) => {
|
||||
// treat "module.exports =" the same as "exports.default =" to not have nested "default.default",
|
||||
// so "exports.default" becomes the actual module
|
||||
if (p === "default" && this.shouldInterop(modulePath, { default: value }) && cjsExports !== value) {
|
||||
exportAll(cjsExports, value);
|
||||
exports.default = value;
|
||||
return true;
|
||||
}
|
||||
if (!Reflect.has(exports, "default")) exports.default = {};
|
||||
// returns undefined, when accessing named exports, if default is not an object
|
||||
// but is still present inside hasOwnKeys, this is Node behaviour for CJS
|
||||
if (moduleExports !== SYMBOL_NOT_DEFINED && isPrimitive(moduleExports)) {
|
||||
defineExport(exports, p, () => void 0);
|
||||
return true;
|
||||
}
|
||||
if (!isPrimitive(exports.default)) exports.default[p] = value;
|
||||
if (p !== "default") defineExport(exports, p, () => value);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
Object.assign(mod, {
|
||||
code: transformed,
|
||||
exports
|
||||
});
|
||||
const moduleProxy = {
|
||||
set exports(value) {
|
||||
exportAll(cjsExports, value);
|
||||
exports.default = value;
|
||||
moduleExports = value;
|
||||
},
|
||||
get exports() {
|
||||
return cjsExports;
|
||||
}
|
||||
};
|
||||
// Vite hot context
|
||||
let hotContext;
|
||||
if (this.options.createHotContext) Object.defineProperty(meta, "hot", {
|
||||
enumerable: true,
|
||||
get: () => {
|
||||
var _this$options$createH, _this$options;
|
||||
hotContext || (hotContext = (_this$options$createH = (_this$options = this.options).createHotContext) === null || _this$options$createH === void 0 ? void 0 : _this$options$createH.call(_this$options, this, moduleId));
|
||||
return hotContext;
|
||||
},
|
||||
set: (value) => {
|
||||
hotContext = value;
|
||||
}
|
||||
});
|
||||
// Be careful when changing this
|
||||
// changing context will change amount of code added on line :114 (vm.runInThisContext)
|
||||
// this messes up sourcemaps for coverage
|
||||
// adjust `WRAPPER_LENGTH` variable in packages/coverage-v8/src/provider.ts if you do change this
|
||||
const context = this.prepareContext({
|
||||
__vite_ssr_import__: request,
|
||||
__vite_ssr_dynamic_import__: request,
|
||||
__vite_ssr_exports__: exports,
|
||||
__vite_ssr_exportAll__: (obj) => exportAll(exports, obj),
|
||||
__vite_ssr_exportName__: (name, getter) => Object.defineProperty(exports, name, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
get: getter
|
||||
}),
|
||||
__vite_ssr_import_meta__: meta,
|
||||
require: createRequire(href),
|
||||
exports: cjsExports,
|
||||
module: moduleProxy,
|
||||
__filename,
|
||||
__dirname
|
||||
});
|
||||
debugExecute(__filename);
|
||||
// remove shebang
|
||||
if (transformed[0] === "#") transformed = transformed.replace(/^#!.*/, (s) => " ".repeat(s.length));
|
||||
await this.runModule(context, transformed);
|
||||
return exports;
|
||||
}
|
||||
getContextPrimitives() {
|
||||
return {
|
||||
Object,
|
||||
Reflect,
|
||||
Symbol
|
||||
};
|
||||
}
|
||||
async runModule(context, transformed) {
|
||||
// add 'use strict' since ESM enables it by default
|
||||
const codeDefinition = `'use strict';async (${Object.keys(context).join(",")})=>{{`;
|
||||
const code = `${codeDefinition}${transformed}\n}}`;
|
||||
const options = {
|
||||
filename: context.__filename,
|
||||
lineOffset: 0,
|
||||
columnOffset: -codeDefinition.length
|
||||
};
|
||||
const finishModuleExecutionInfo = this.startCalculateModuleExecutionInfo(options.filename, codeDefinition.length);
|
||||
try {
|
||||
const fn = vm.runInThisContext(code, options);
|
||||
await fn(...Object.values(context));
|
||||
} finally {
|
||||
var _this$options$moduleE;
|
||||
(_this$options$moduleE = this.options.moduleExecutionInfo) === null || _this$options$moduleE === void 0 || _this$options$moduleE.set(options.filename, finishModuleExecutionInfo());
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Starts calculating the module execution info such as the total duration and self time spent on executing the module.
|
||||
* Returns a function to call once the module has finished executing.
|
||||
*/
|
||||
startCalculateModuleExecutionInfo(filename, startOffset) {
|
||||
const startTime = this.performanceNow();
|
||||
this.executionStack.push({
|
||||
filename,
|
||||
startTime,
|
||||
subImportTime: 0
|
||||
});
|
||||
return () => {
|
||||
const duration = this.performanceNow() - startTime;
|
||||
const currentExecution = this.executionStack.pop();
|
||||
if (currentExecution == null) throw new Error("Execution stack is empty, this should never happen");
|
||||
const selfTime = duration - currentExecution.subImportTime;
|
||||
if (this.executionStack.length > 0) this.executionStack.at(-1).subImportTime += duration;
|
||||
return {
|
||||
startOffset,
|
||||
duration,
|
||||
selfTime
|
||||
};
|
||||
};
|
||||
}
|
||||
prepareContext(context) {
|
||||
return context;
|
||||
}
|
||||
/**
|
||||
* Define if a module should be interop-ed
|
||||
* This function mostly for the ability to override by subclass
|
||||
*/
|
||||
shouldInterop(path, mod) {
|
||||
if (this.options.interopDefault === false) return false;
|
||||
// never interop ESM modules
|
||||
// TODO: should also skip for `.js` with `type="module"`
|
||||
return !path.endsWith(".mjs") && "default" in mod;
|
||||
}
|
||||
importExternalModule(path) {
|
||||
return import(
|
||||
/* @vite-ignore */
|
||||
path
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Import a module and interop it
|
||||
*/
|
||||
async interopedImport(path) {
|
||||
const importedModule = await this.importExternalModule(path);
|
||||
if (!this.shouldInterop(path, importedModule)) return importedModule;
|
||||
const { mod, defaultExport } = interopModule(importedModule);
|
||||
return new Proxy(mod, {
|
||||
get(mod, prop) {
|
||||
if (prop === "default") return defaultExport;
|
||||
return mod[prop] ?? (defaultExport === null || defaultExport === void 0 ? void 0 : defaultExport[prop]);
|
||||
},
|
||||
has(mod, prop) {
|
||||
if (prop === "default") return defaultExport !== void 0;
|
||||
return prop in mod || defaultExport && prop in defaultExport;
|
||||
},
|
||||
getOwnPropertyDescriptor(mod, prop) {
|
||||
const descriptor = Reflect.getOwnPropertyDescriptor(mod, prop);
|
||||
if (descriptor) return descriptor;
|
||||
if (prop === "default" && defaultExport !== void 0) return {
|
||||
value: defaultExport,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
function interopModule(mod) {
|
||||
if (isPrimitive(mod)) return {
|
||||
mod: { default: mod },
|
||||
defaultExport: mod
|
||||
};
|
||||
let defaultExport = "default" in mod ? mod.default : mod;
|
||||
if (!isPrimitive(defaultExport) && "__esModule" in defaultExport) {
|
||||
mod = defaultExport;
|
||||
if ("default" in defaultExport) defaultExport = defaultExport.default;
|
||||
}
|
||||
return {
|
||||
mod,
|
||||
defaultExport
|
||||
};
|
||||
}
|
||||
// keep consistency with Vite on how exports are defined
|
||||
function defineExport(exports, key, value) {
|
||||
Object.defineProperty(exports, key, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
get: value
|
||||
});
|
||||
}
|
||||
function exportAll(exports, sourceModule) {
|
||||
// #1120 when a module exports itself it causes
|
||||
// call stack error
|
||||
if (exports === sourceModule) return;
|
||||
if (isPrimitive(sourceModule) || Array.isArray(sourceModule) || sourceModule instanceof Promise) return;
|
||||
for (const key in sourceModule) if (key !== "default" && !(key in exports)) try {
|
||||
defineExport(exports, key, () => sourceModule[key]);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
export { DEFAULT_REQUEST_STUBS, ModuleCacheMap, ViteNodeRunner };
|
||||
36
GTA_P_V2/node_modules/vite-node/dist/constants.cjs
generated
vendored
Normal file
36
GTA_P_V2/node_modules/vite-node/dist/constants.cjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict';
|
||||
|
||||
const KNOWN_ASSET_TYPES = [
|
||||
"apng",
|
||||
"bmp",
|
||||
"png",
|
||||
"jpe?g",
|
||||
"jfif",
|
||||
"pjpeg",
|
||||
"pjp",
|
||||
"gif",
|
||||
"svg",
|
||||
"ico",
|
||||
"webp",
|
||||
"avif",
|
||||
"mp4",
|
||||
"webm",
|
||||
"ogg",
|
||||
"mp3",
|
||||
"wav",
|
||||
"flac",
|
||||
"aac",
|
||||
"woff2?",
|
||||
"eot",
|
||||
"ttf",
|
||||
"otf",
|
||||
"webmanifest",
|
||||
"pdf",
|
||||
"txt"
|
||||
];
|
||||
const KNOWN_ASSET_RE = new RegExp(`\\.(${KNOWN_ASSET_TYPES.join("|")})$`);
|
||||
const CSS_LANGS_RE = /\.(css|less|sass|scss|styl|stylus|pcss|postcss|sss)(?:$|\?)/;
|
||||
|
||||
exports.CSS_LANGS_RE = CSS_LANGS_RE;
|
||||
exports.KNOWN_ASSET_RE = KNOWN_ASSET_RE;
|
||||
exports.KNOWN_ASSET_TYPES = KNOWN_ASSET_TYPES;
|
||||
5
GTA_P_V2/node_modules/vite-node/dist/constants.d.ts
generated
vendored
Normal file
5
GTA_P_V2/node_modules/vite-node/dist/constants.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
declare const KNOWN_ASSET_TYPES: string[];
|
||||
declare const KNOWN_ASSET_RE: RegExp;
|
||||
declare const CSS_LANGS_RE: RegExp;
|
||||
|
||||
export { CSS_LANGS_RE, KNOWN_ASSET_RE, KNOWN_ASSET_TYPES };
|
||||
32
GTA_P_V2/node_modules/vite-node/dist/constants.mjs
generated
vendored
Normal file
32
GTA_P_V2/node_modules/vite-node/dist/constants.mjs
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
const KNOWN_ASSET_TYPES = [
|
||||
"apng",
|
||||
"bmp",
|
||||
"png",
|
||||
"jpe?g",
|
||||
"jfif",
|
||||
"pjpeg",
|
||||
"pjp",
|
||||
"gif",
|
||||
"svg",
|
||||
"ico",
|
||||
"webp",
|
||||
"avif",
|
||||
"mp4",
|
||||
"webm",
|
||||
"ogg",
|
||||
"mp3",
|
||||
"wav",
|
||||
"flac",
|
||||
"aac",
|
||||
"woff2?",
|
||||
"eot",
|
||||
"ttf",
|
||||
"otf",
|
||||
"webmanifest",
|
||||
"pdf",
|
||||
"txt"
|
||||
];
|
||||
const KNOWN_ASSET_RE = new RegExp(`\\.(${KNOWN_ASSET_TYPES.join("|")})$`);
|
||||
const CSS_LANGS_RE = /\.(css|less|sass|scss|styl|stylus|pcss|postcss|sss)(?:$|\?)/;
|
||||
|
||||
export { CSS_LANGS_RE, KNOWN_ASSET_RE, KNOWN_ASSET_TYPES };
|
||||
21
GTA_P_V2/node_modules/vite-node/dist/hmr.cjs
generated
vendored
Normal file
21
GTA_P_V2/node_modules/vite-node/dist/hmr.cjs
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
'use strict';
|
||||
|
||||
var hmr = require('./chunk-hmr.cjs');
|
||||
require('node:events');
|
||||
require('debug');
|
||||
require('./chunk-browser.cjs');
|
||||
require('./utils.cjs');
|
||||
require('node:fs');
|
||||
require('node:module');
|
||||
require('node:url');
|
||||
require('pathe');
|
||||
|
||||
|
||||
|
||||
exports.createHmrEmitter = hmr.createHmrEmitter;
|
||||
exports.createHotContext = hmr.createHotContext;
|
||||
exports.getCache = hmr.getCache;
|
||||
exports.handleMessage = hmr.handleMessage;
|
||||
exports.reload = hmr.reload;
|
||||
exports.sendMessageBuffer = hmr.sendMessageBuffer;
|
||||
exports.viteNodeHmrPlugin = hmr.viteNodeHmrPlugin;
|
||||
58
GTA_P_V2/node_modules/vite-node/dist/hmr.d.ts
generated
vendored
Normal file
58
GTA_P_V2/node_modules/vite-node/dist/hmr.d.ts
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
import { HMRPayload, Plugin } from 'vite';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { C as CustomEventMap, a as ViteNodeRunner, H as HMRPayload$1, b as HotContext } from './index.d-DGmxD2U7.js';
|
||||
import './trace-mapping.d-DLVdEqOp.js';
|
||||
|
||||
type EventType = string | symbol;
|
||||
type Handler<T = unknown> = (event: T) => void;
|
||||
interface Emitter<Events extends Record<EventType, unknown>> {
|
||||
on: <Key extends keyof Events>(type: Key, handler: Handler<Events[Key]>) => void;
|
||||
off: <Key extends keyof Events>(type: Key, handler?: Handler<Events[Key]>) => void;
|
||||
emit: (<Key extends keyof Events>(type: Key, event: Events[Key]) => void) & (<Key extends keyof Events>(type: undefined extends Events[Key] ? Key : never) => void);
|
||||
}
|
||||
type HMREmitter = Emitter<{
|
||||
message: HMRPayload
|
||||
}> & EventEmitter;
|
||||
declare module "vite" {
|
||||
interface ViteDevServer {
|
||||
emitter: HMREmitter;
|
||||
}
|
||||
}
|
||||
declare function createHmrEmitter(): HMREmitter;
|
||||
declare function viteNodeHmrPlugin(): Plugin;
|
||||
|
||||
/* eslint-disable no-console */
|
||||
|
||||
type ModuleNamespace = Record<string, any> & {
|
||||
[Symbol.toStringTag]: "Module"
|
||||
};
|
||||
type InferCustomEventPayload<T extends string> = T extends keyof CustomEventMap ? CustomEventMap[T] : any;
|
||||
interface HotModule {
|
||||
id: string;
|
||||
callbacks: HotCallback[];
|
||||
}
|
||||
interface HotCallback {
|
||||
// the dependencies must be fetchable paths
|
||||
deps: string[];
|
||||
fn: (modules: (ModuleNamespace | undefined)[]) => void;
|
||||
}
|
||||
interface CacheData {
|
||||
hotModulesMap: Map<string, HotModule>;
|
||||
dataMap: Map<string, any>;
|
||||
disposeMap: Map<string, (data: any) => void | Promise<void>>;
|
||||
pruneMap: Map<string, (data: any) => void | Promise<void>>;
|
||||
customListenersMap: Map<string, ((data: any) => void)[]>;
|
||||
ctxToListenersMap: Map<string, Map<string, ((data: any) => void)[]>>;
|
||||
messageBuffer: string[];
|
||||
isFirstUpdate: boolean;
|
||||
pending: boolean;
|
||||
queued: Promise<(() => void) | undefined>[];
|
||||
}
|
||||
declare function getCache(runner: ViteNodeRunner): CacheData;
|
||||
declare function sendMessageBuffer(runner: ViteNodeRunner, emitter: HMREmitter): void;
|
||||
declare function reload(runner: ViteNodeRunner, files: string[]): Promise<any[]>;
|
||||
declare function handleMessage(runner: ViteNodeRunner, emitter: HMREmitter, files: string[], payload: HMRPayload$1): Promise<void>;
|
||||
declare function createHotContext(runner: ViteNodeRunner, emitter: HMREmitter, files: string[], ownerPath: string): HotContext;
|
||||
|
||||
export { createHmrEmitter, createHotContext, getCache, handleMessage, reload, sendMessageBuffer, viteNodeHmrPlugin };
|
||||
export type { Emitter, EventType, HMREmitter, Handler, HotCallback, HotModule, InferCustomEventPayload, ModuleNamespace };
|
||||
9
GTA_P_V2/node_modules/vite-node/dist/hmr.mjs
generated
vendored
Normal file
9
GTA_P_V2/node_modules/vite-node/dist/hmr.mjs
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
export { c as createHmrEmitter, a as createHotContext, g as getCache, h as handleMessage, r as reload, s as sendMessageBuffer, v as viteNodeHmrPlugin } from './chunk-hmr.mjs';
|
||||
import 'node:events';
|
||||
import 'debug';
|
||||
import './chunk-browser.mjs';
|
||||
import './utils.mjs';
|
||||
import 'node:fs';
|
||||
import 'node:module';
|
||||
import 'node:url';
|
||||
import 'pathe';
|
||||
2
GTA_P_V2/node_modules/vite-node/dist/index.cjs
generated
vendored
Normal file
2
GTA_P_V2/node_modules/vite-node/dist/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
'use strict';
|
||||
|
||||
363
GTA_P_V2/node_modules/vite-node/dist/index.d-DGmxD2U7.d.ts
generated
vendored
Normal file
363
GTA_P_V2/node_modules/vite-node/dist/index.d-DGmxD2U7.d.ts
generated
vendored
Normal file
@@ -0,0 +1,363 @@
|
||||
import { E as EncodedSourceMap } from './trace-mapping.d-DLVdEqOp.js';
|
||||
|
||||
/** @deprecated use HotPayload */
|
||||
type HMRPayload = HotPayload
|
||||
type HotPayload =
|
||||
| ConnectedPayload
|
||||
| PingPayload
|
||||
| UpdatePayload
|
||||
| FullReloadPayload
|
||||
| CustomPayload
|
||||
| ErrorPayload
|
||||
| PrunePayload
|
||||
|
||||
interface ConnectedPayload {
|
||||
type: 'connected'
|
||||
}
|
||||
|
||||
interface PingPayload {
|
||||
type: 'ping'
|
||||
}
|
||||
|
||||
interface UpdatePayload {
|
||||
type: 'update'
|
||||
updates: Update[]
|
||||
}
|
||||
|
||||
interface Update {
|
||||
type: 'js-update' | 'css-update'
|
||||
path: string
|
||||
acceptedPath: string
|
||||
timestamp: number
|
||||
/** @internal */
|
||||
explicitImportRequired?: boolean
|
||||
/** @internal */
|
||||
isWithinCircularImport?: boolean
|
||||
/** @internal */
|
||||
firstInvalidatedBy?: string
|
||||
/** @internal */
|
||||
invalidates?: string[]
|
||||
}
|
||||
|
||||
interface PrunePayload {
|
||||
type: 'prune'
|
||||
paths: string[]
|
||||
}
|
||||
|
||||
interface FullReloadPayload {
|
||||
type: 'full-reload'
|
||||
path?: string
|
||||
/** @internal */
|
||||
triggeredBy?: string
|
||||
}
|
||||
|
||||
interface CustomPayload {
|
||||
type: 'custom'
|
||||
event: string
|
||||
data?: any
|
||||
}
|
||||
|
||||
interface ErrorPayload {
|
||||
type: 'error'
|
||||
err: {
|
||||
[name: string]: any
|
||||
message: string
|
||||
stack: string
|
||||
id?: string
|
||||
frame?: string
|
||||
plugin?: string
|
||||
pluginCode?: string
|
||||
loc?: {
|
||||
file?: string
|
||||
line: number
|
||||
column: number
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
interface CustomEventMap {
|
||||
'vite:beforeUpdate': UpdatePayload
|
||||
'vite:afterUpdate': UpdatePayload
|
||||
'vite:beforePrune': PrunePayload
|
||||
'vite:beforeFullReload': FullReloadPayload
|
||||
'vite:error': ErrorPayload
|
||||
'vite:invalidate': InvalidatePayload
|
||||
'vite:ws:connect': WebSocketConnectionPayload
|
||||
'vite:ws:disconnect': WebSocketConnectionPayload
|
||||
}
|
||||
|
||||
interface WebSocketConnectionPayload {
|
||||
/**
|
||||
* @experimental
|
||||
* We expose this instance experimentally to see potential usage.
|
||||
* This might be removed in the future if we didn't find reasonable use cases.
|
||||
* If you find this useful, please open an issue with details so we can discuss and make it stable API.
|
||||
*/
|
||||
// eslint-disable-next-line n/no-unsupported-features/node-builtins
|
||||
webSocket: WebSocket
|
||||
}
|
||||
|
||||
interface InvalidatePayload {
|
||||
path: string
|
||||
message: string | undefined
|
||||
firstInvalidatedBy: string
|
||||
}
|
||||
|
||||
/**
|
||||
* provides types for payloads of built-in Vite events
|
||||
*/
|
||||
type InferCustomEventPayload<T extends string> =
|
||||
T extends keyof CustomEventMap ? CustomEventMap[T] : any
|
||||
|
||||
/**
|
||||
* provides types for names of built-in Vite events
|
||||
*/
|
||||
type CustomEventName = keyof CustomEventMap | (string & {})
|
||||
|
||||
type ModuleNamespace = Record<string, any> & {
|
||||
[Symbol.toStringTag]: 'Module'
|
||||
}
|
||||
|
||||
interface ViteHotContext {
|
||||
readonly data: any
|
||||
|
||||
accept(): void
|
||||
accept(cb: (mod: ModuleNamespace | undefined) => void): void
|
||||
accept(dep: string, cb: (mod: ModuleNamespace | undefined) => void): void
|
||||
accept(
|
||||
deps: readonly string[],
|
||||
cb: (mods: Array<ModuleNamespace | undefined>) => void,
|
||||
): void
|
||||
|
||||
acceptExports(
|
||||
exportNames: string | readonly string[],
|
||||
cb?: (mod: ModuleNamespace | undefined) => void,
|
||||
): void
|
||||
|
||||
dispose(cb: (data: any) => void): void
|
||||
prune(cb: (data: any) => void): void
|
||||
invalidate(message?: string): void
|
||||
|
||||
on<T extends CustomEventName>(
|
||||
event: T,
|
||||
cb: (payload: InferCustomEventPayload<T>) => void,
|
||||
): void
|
||||
off<T extends CustomEventName>(
|
||||
event: T,
|
||||
cb: (payload: InferCustomEventPayload<T>) => void,
|
||||
): void
|
||||
send<T extends CustomEventName>(
|
||||
event: T,
|
||||
data?: InferCustomEventPayload<T>,
|
||||
): void
|
||||
}
|
||||
|
||||
declare const DEFAULT_REQUEST_STUBS: Record<string, Record<string, unknown>>;
|
||||
declare class ModuleCacheMap extends Map<string, ModuleCache> {
|
||||
normalizePath(fsPath: string): string;
|
||||
/**
|
||||
* Assign partial data to the map
|
||||
*/
|
||||
update(fsPath: string, mod: ModuleCache): this;
|
||||
setByModuleId(modulePath: string, mod: ModuleCache): this;
|
||||
set(fsPath: string, mod: ModuleCache): this;
|
||||
getByModuleId(modulePath: string): ModuleCache & Required<Pick<ModuleCache, "imports" | "importers">>;
|
||||
get(fsPath: string): ModuleCache & Required<Pick<ModuleCache, "importers" | "imports">>;
|
||||
deleteByModuleId(modulePath: string): boolean;
|
||||
delete(fsPath: string): boolean;
|
||||
invalidateModule(mod: ModuleCache): boolean;
|
||||
/**
|
||||
* Invalidate modules that dependent on the given modules, up to the main entry
|
||||
*/
|
||||
invalidateDepTree(ids: string[] | Set<string>, invalidated?: Set<string>): Set<string>;
|
||||
/**
|
||||
* Invalidate dependency modules of the given modules, down to the bottom-level dependencies
|
||||
*/
|
||||
invalidateSubDepTree(ids: string[] | Set<string>, invalidated?: Set<string>): Set<string>;
|
||||
/**
|
||||
* Return parsed source map based on inlined source map of the module
|
||||
*/
|
||||
getSourceMap(id: string): EncodedSourceMap | null;
|
||||
}
|
||||
type ModuleExecutionInfo = Map<string, ModuleExecutionInfoEntry>;
|
||||
interface ModuleExecutionInfoEntry {
|
||||
startOffset: number;
|
||||
/** The duration that was spent executing the module. */
|
||||
duration: number;
|
||||
/** The time that was spent executing the module itself and externalized imports. */
|
||||
selfTime: number;
|
||||
}
|
||||
declare class ViteNodeRunner {
|
||||
options: ViteNodeRunnerOptions;
|
||||
root: string;
|
||||
debug: boolean;
|
||||
/**
|
||||
* Holds the cache of modules
|
||||
* Keys of the map are filepaths, or plain package names
|
||||
*/
|
||||
moduleCache: ModuleCacheMap;
|
||||
/**
|
||||
* Tracks the stack of modules being executed for the purpose of calculating import self-time.
|
||||
*
|
||||
* Note that while in most cases, imports are a linear stack of modules,
|
||||
* this is occasionally not the case, for example when you have parallel top-level dynamic imports like so:
|
||||
*
|
||||
* ```ts
|
||||
* await Promise.all([
|
||||
* import('./module1'),
|
||||
* import('./module2'),
|
||||
* ]);
|
||||
* ```
|
||||
*
|
||||
* In this case, the self time will be reported incorrectly for one of the modules (could go negative).
|
||||
* As top-level awaits with dynamic imports like this are uncommon, we don't handle this case specifically.
|
||||
*/
|
||||
private executionStack;
|
||||
// `performance` can be mocked, so make sure we're using the original function
|
||||
private performanceNow;
|
||||
constructor(options: ViteNodeRunnerOptions);
|
||||
executeFile(file: string): Promise<any>;
|
||||
executeId(rawId: string): Promise<any>;
|
||||
shouldResolveId(id: string, _importee?: string): boolean;
|
||||
private _resolveUrl;
|
||||
resolveUrl(id: string, importee?: string): Promise<[url: string, fsPath: string]>;
|
||||
private _fetchModule;
|
||||
protected getContextPrimitives(): {
|
||||
Object: ObjectConstructor
|
||||
Reflect: typeof Reflect
|
||||
Symbol: SymbolConstructor
|
||||
};
|
||||
protected runModule(context: Record<string, any>, transformed: string): Promise<void>;
|
||||
/**
|
||||
* Starts calculating the module execution info such as the total duration and self time spent on executing the module.
|
||||
* Returns a function to call once the module has finished executing.
|
||||
*/
|
||||
protected startCalculateModuleExecutionInfo(filename: string, startOffset: number): () => ModuleExecutionInfoEntry;
|
||||
prepareContext(context: Record<string, any>): Record<string, any>;
|
||||
/**
|
||||
* Define if a module should be interop-ed
|
||||
* This function mostly for the ability to override by subclass
|
||||
*/
|
||||
shouldInterop(path: string, mod: any): boolean;
|
||||
protected importExternalModule(path: string): Promise<any>;
|
||||
/**
|
||||
* Import a module and interop it
|
||||
*/
|
||||
interopedImport(path: string): Promise<any>;
|
||||
}
|
||||
|
||||
type Nullable<T> = T | null | undefined;
|
||||
type Arrayable<T> = T | Array<T>;
|
||||
type Awaitable<T> = T | PromiseLike<T>;
|
||||
interface DepsHandlingOptions {
|
||||
external?: (string | RegExp)[];
|
||||
inline?: (string | RegExp)[] | true;
|
||||
inlineFiles?: string[];
|
||||
/**
|
||||
* A list of directories that are considered to hold Node.js modules
|
||||
* Have to include "/" at the start and end of the path
|
||||
*
|
||||
* Vite-Node checks the whole absolute path of the import, so make sure you don't include
|
||||
* unwanted files accidentally
|
||||
* @default ['/node_modules/']
|
||||
*/
|
||||
moduleDirectories?: string[];
|
||||
cacheDir?: string;
|
||||
/**
|
||||
* Try to guess the CJS version of a package when it's invalid ESM
|
||||
* @default false
|
||||
*/
|
||||
fallbackCJS?: boolean;
|
||||
}
|
||||
interface StartOfSourceMap {
|
||||
file?: string;
|
||||
sourceRoot?: string;
|
||||
}
|
||||
|
||||
interface RawSourceMap extends StartOfSourceMap {
|
||||
version: number;
|
||||
sources: string[];
|
||||
names: string[];
|
||||
sourcesContent?: (string | null)[];
|
||||
mappings: string;
|
||||
}
|
||||
interface FetchResult {
|
||||
code?: string;
|
||||
externalize?: string;
|
||||
map?: EncodedSourceMap | null;
|
||||
}
|
||||
type HotContext = Omit<ViteHotContext, "acceptDeps" | "decline">;
|
||||
type FetchFunction = (id: string) => Promise<FetchResult>;
|
||||
type ResolveIdFunction = (id: string, importer?: string) => Awaitable<ViteNodeResolveId | null | undefined | void>;
|
||||
type CreateHotContextFunction = (runner: ViteNodeRunner, url: string) => HotContext;
|
||||
interface ModuleCache {
|
||||
promise?: Promise<any>;
|
||||
exports?: any;
|
||||
evaluated?: boolean;
|
||||
resolving?: boolean;
|
||||
code?: string;
|
||||
map?: EncodedSourceMap;
|
||||
/**
|
||||
* Module ids that imports this module
|
||||
*/
|
||||
importers?: Set<string>;
|
||||
imports?: Set<string>;
|
||||
}
|
||||
interface ViteNodeRunnerOptions {
|
||||
root: string;
|
||||
fetchModule: FetchFunction;
|
||||
resolveId?: ResolveIdFunction;
|
||||
createHotContext?: CreateHotContextFunction;
|
||||
base?: string;
|
||||
moduleCache?: ModuleCacheMap;
|
||||
moduleExecutionInfo?: ModuleExecutionInfo;
|
||||
interopDefault?: boolean;
|
||||
requestStubs?: Record<string, any>;
|
||||
debug?: boolean;
|
||||
}
|
||||
interface ViteNodeResolveId {
|
||||
external?: boolean | "absolute" | "relative";
|
||||
id: string;
|
||||
meta?: Record<string, any> | null;
|
||||
moduleSideEffects?: boolean | "no-treeshake" | null;
|
||||
syntheticNamedExports?: boolean | string | null;
|
||||
}
|
||||
interface ViteNodeResolveModule {
|
||||
external: string | null;
|
||||
id: string;
|
||||
fsPath: string;
|
||||
}
|
||||
interface ViteNodeServerOptions {
|
||||
/**
|
||||
* Inject inline sourcemap to modules
|
||||
* @default 'inline'
|
||||
*/
|
||||
sourcemap?: "inline" | boolean;
|
||||
/**
|
||||
* Deps handling
|
||||
*/
|
||||
deps?: DepsHandlingOptions;
|
||||
/**
|
||||
* Transform method for modules
|
||||
*/
|
||||
transformMode?: {
|
||||
ssr?: RegExp[]
|
||||
web?: RegExp[]
|
||||
};
|
||||
debug?: DebuggerOptions;
|
||||
}
|
||||
interface DebuggerOptions {
|
||||
/**
|
||||
* Dump the transformed module to filesystem
|
||||
* Passing a string will dump to the specified path
|
||||
*/
|
||||
dumpModules?: boolean | string;
|
||||
/**
|
||||
* Read dumpped module from filesystem whenever exists.
|
||||
* Useful for debugging by modifying the dump result from the filesystem.
|
||||
*/
|
||||
loadDumppedModules?: boolean;
|
||||
}
|
||||
|
||||
export { ModuleCacheMap as M, ViteNodeRunner as a, DEFAULT_REQUEST_STUBS as e };
|
||||
export type { Arrayable as A, CustomEventMap as C, DebuggerOptions as D, FetchResult as F, HMRPayload as H, Nullable as N, RawSourceMap as R, StartOfSourceMap as S, ViteNodeServerOptions as V, HotContext as b, DepsHandlingOptions as c, ViteNodeResolveId as d, ModuleExecutionInfo as f, ModuleExecutionInfoEntry as g, Awaitable as h, FetchFunction as i, ResolveIdFunction as j, CreateHotContextFunction as k, ModuleCache as l, ViteNodeRunnerOptions as m, ViteNodeResolveModule as n };
|
||||
2
GTA_P_V2/node_modules/vite-node/dist/index.d.ts
generated
vendored
Normal file
2
GTA_P_V2/node_modules/vite-node/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export { A as Arrayable, h as Awaitable, k as CreateHotContextFunction, D as DebuggerOptions, c as DepsHandlingOptions, i as FetchFunction, F as FetchResult, b as HotContext, l as ModuleCache, M as ModuleCacheMap, f as ModuleExecutionInfo, N as Nullable, R as RawSourceMap, j as ResolveIdFunction, S as StartOfSourceMap, d as ViteNodeResolveId, n as ViteNodeResolveModule, m as ViteNodeRunnerOptions, V as ViteNodeServerOptions } from './index.d-DGmxD2U7.js';
|
||||
export { D as DecodedSourceMap, E as EncodedSourceMap, S as SourceMapInput } from './trace-mapping.d-DLVdEqOp.js';
|
||||
1
GTA_P_V2/node_modules/vite-node/dist/index.mjs
generated
vendored
Normal file
1
GTA_P_V2/node_modules/vite-node/dist/index.mjs
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
440
GTA_P_V2/node_modules/vite-node/dist/server.cjs
generated
vendored
Normal file
440
GTA_P_V2/node_modules/vite-node/dist/server.cjs
generated
vendored
Normal file
@@ -0,0 +1,440 @@
|
||||
'use strict';
|
||||
|
||||
var assert = require('node:assert');
|
||||
var fs = require('node:fs');
|
||||
var node_perf_hooks = require('node:perf_hooks');
|
||||
var node_url = require('node:url');
|
||||
var createDebug = require('debug');
|
||||
var pathe = require('pathe');
|
||||
var vite = require('vite');
|
||||
var browser = require('./chunk-browser.cjs');
|
||||
var esModuleLexer = require('es-module-lexer');
|
||||
var constants = require('./constants.cjs');
|
||||
var utils = require('./utils.cjs');
|
||||
var sourceMap = require('./source-map.cjs');
|
||||
require('node:module');
|
||||
require('node:path');
|
||||
|
||||
function _interopNamespaceDefault(e) {
|
||||
var n = Object.create(null);
|
||||
if (e) {
|
||||
Object.keys(e).forEach(function (k) {
|
||||
if (k !== 'default') {
|
||||
var d = Object.getOwnPropertyDescriptor(e, k);
|
||||
Object.defineProperty(n, k, d.get ? d : {
|
||||
enumerable: true,
|
||||
get: function () { return e[k]; }
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
n.default = e;
|
||||
return Object.freeze(n);
|
||||
}
|
||||
|
||||
var esModuleLexer__namespace = /*#__PURE__*/_interopNamespaceDefault(esModuleLexer);
|
||||
|
||||
/* eslint-disable no-console */
|
||||
function hashCode(s) {
|
||||
return s.split("").reduce((a, b) => {
|
||||
a = (a << 5) - a + b.charCodeAt(0);
|
||||
return a & a;
|
||||
}, 0);
|
||||
}
|
||||
class Debugger {
|
||||
dumpDir;
|
||||
initPromise;
|
||||
externalizeMap = /* @__PURE__ */ new Map();
|
||||
constructor(root, options) {
|
||||
this.options = options;
|
||||
if (options.dumpModules) this.dumpDir = pathe.resolve(root, options.dumpModules === true ? ".vite-node/dump" : options.dumpModules);
|
||||
if (this.dumpDir) if (options.loadDumppedModules) console.info(browser.s.gray(`[vite-node] [debug] load modules from ${this.dumpDir}`));
|
||||
else console.info(browser.s.gray(`[vite-node] [debug] dump modules to ${this.dumpDir}`));
|
||||
this.initPromise = this.clearDump();
|
||||
}
|
||||
async clearDump() {
|
||||
if (!this.dumpDir) return;
|
||||
if (!this.options.loadDumppedModules && fs.existsSync(this.dumpDir)) await fs.promises.rm(this.dumpDir, {
|
||||
recursive: true,
|
||||
force: true
|
||||
});
|
||||
await fs.promises.mkdir(this.dumpDir, { recursive: true });
|
||||
}
|
||||
encodeId(id) {
|
||||
return `${id.replace(/[^\w@\-]/g, "_").replace(/_+/g, "_")}-${hashCode(id)}.js`;
|
||||
}
|
||||
async recordExternalize(id, path) {
|
||||
if (!this.dumpDir) return;
|
||||
this.externalizeMap.set(id, path);
|
||||
await this.writeInfo();
|
||||
}
|
||||
async dumpFile(id, result) {
|
||||
if (!result || !this.dumpDir) return;
|
||||
await this.initPromise;
|
||||
const name = this.encodeId(id);
|
||||
return await fs.promises.writeFile(pathe.join(this.dumpDir, name), `// ${id.replace(/\0/g, "\\0")}\n${result.code}`, "utf-8");
|
||||
}
|
||||
async loadDump(id) {
|
||||
if (!this.dumpDir) return null;
|
||||
await this.initPromise;
|
||||
const name = this.encodeId(id);
|
||||
const path = pathe.join(this.dumpDir, name);
|
||||
if (!fs.existsSync(path)) return null;
|
||||
const code = await fs.promises.readFile(path, "utf-8");
|
||||
return {
|
||||
code: code.replace(/^\/\/.*\n/, ""),
|
||||
map: void 0
|
||||
};
|
||||
}
|
||||
async writeInfo() {
|
||||
if (!this.dumpDir) return;
|
||||
const info = JSON.stringify({
|
||||
time: (/* @__PURE__ */ new Date()).toLocaleString(),
|
||||
externalize: Object.fromEntries(this.externalizeMap.entries())
|
||||
}, null, 2);
|
||||
return fs.promises.writeFile(pathe.join(this.dumpDir, "info.json"), info, "utf-8");
|
||||
}
|
||||
}
|
||||
|
||||
const BUILTIN_EXTENSIONS = new Set([
|
||||
".mjs",
|
||||
".cjs",
|
||||
".node",
|
||||
".wasm"
|
||||
]);
|
||||
const ESM_EXT_RE = /\.(es|esm|esm-browser|esm-bundler|es6|module)\.js$/;
|
||||
const ESM_FOLDER_RE = /\/(es|esm)\/(.*\.js)$/;
|
||||
const defaultInline = [
|
||||
/virtual:/,
|
||||
/\.[mc]?ts$/,
|
||||
/[?&](init|raw|url|inline)\b/,
|
||||
constants.KNOWN_ASSET_RE
|
||||
];
|
||||
const depsExternal = [/\/node_modules\/.*\.cjs\.js$/, /\/node_modules\/.*\.mjs$/];
|
||||
function guessCJSversion(id) {
|
||||
if (id.match(ESM_EXT_RE)) {
|
||||
for (const i of [
|
||||
id.replace(ESM_EXT_RE, ".mjs"),
|
||||
id.replace(ESM_EXT_RE, ".umd.js"),
|
||||
id.replace(ESM_EXT_RE, ".cjs.js"),
|
||||
id.replace(ESM_EXT_RE, ".js")
|
||||
]) if (fs.existsSync(i)) return i;
|
||||
}
|
||||
if (id.match(ESM_FOLDER_RE)) {
|
||||
for (const i of [
|
||||
id.replace(ESM_FOLDER_RE, "/umd/$1"),
|
||||
id.replace(ESM_FOLDER_RE, "/cjs/$1"),
|
||||
id.replace(ESM_FOLDER_RE, "/lib/$1"),
|
||||
id.replace(ESM_FOLDER_RE, "/$1")
|
||||
]) if (fs.existsSync(i)) return i;
|
||||
}
|
||||
}
|
||||
// The code from https://github.com/unjs/mlly/blob/c5bcca0cda175921344fd6de1bc0c499e73e5dac/src/syntax.ts#L51-L98
|
||||
async function isValidNodeImport(id) {
|
||||
const extension = pathe.extname(id);
|
||||
if (BUILTIN_EXTENSIONS.has(extension)) return true;
|
||||
if (extension !== ".js") return false;
|
||||
id = id.replace("file:///", "");
|
||||
const package_ = await utils.findNearestPackageData(pathe.dirname(id));
|
||||
if (package_.type === "module") return true;
|
||||
if (/\.(?:\w+-)?esm?(?:-\w+)?\.js$|\/esm?\//.test(id)) return false;
|
||||
try {
|
||||
await esModuleLexer__namespace.init;
|
||||
const code = await fs.promises.readFile(id, "utf8");
|
||||
const [, , , hasModuleSyntax] = esModuleLexer__namespace.parse(code);
|
||||
return !hasModuleSyntax;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const _defaultExternalizeCache = /* @__PURE__ */ new Map();
|
||||
async function shouldExternalize(id, options, cache = _defaultExternalizeCache) {
|
||||
if (!cache.has(id)) cache.set(id, _shouldExternalize(id, options));
|
||||
return cache.get(id);
|
||||
}
|
||||
async function _shouldExternalize(id, options) {
|
||||
if (utils.isNodeBuiltin(id)) return id;
|
||||
// data: should be processed by native import,
|
||||
// since it is a feature of ESM.
|
||||
// also externalize network imports since nodejs allows it when --experimental-network-imports
|
||||
if (id.startsWith("data:") || /^(?:https?:)?\/\//.test(id)) return id;
|
||||
id = patchWindowsImportPath(id);
|
||||
const moduleDirectories = (options === null || options === void 0 ? void 0 : options.moduleDirectories) || ["/node_modules/"];
|
||||
if (matchExternalizePattern(id, moduleDirectories, options === null || options === void 0 ? void 0 : options.inline)) return false;
|
||||
if ((options === null || options === void 0 ? void 0 : options.inlineFiles) && (options === null || options === void 0 ? void 0 : options.inlineFiles.includes(id))) return false;
|
||||
if (matchExternalizePattern(id, moduleDirectories, options === null || options === void 0 ? void 0 : options.external)) return id;
|
||||
// Unless the user explicitly opted to inline them, externalize Vite deps.
|
||||
// They are too big to inline by default.
|
||||
if ((options === null || options === void 0 ? void 0 : options.cacheDir) && id.includes(options.cacheDir)) return id;
|
||||
const isLibraryModule = moduleDirectories.some((dir) => id.includes(dir));
|
||||
const guessCJS = isLibraryModule && (options === null || options === void 0 ? void 0 : options.fallbackCJS);
|
||||
id = guessCJS ? guessCJSversion(id) || id : id;
|
||||
if (matchExternalizePattern(id, moduleDirectories, defaultInline)) return false;
|
||||
if (matchExternalizePattern(id, moduleDirectories, depsExternal)) return id;
|
||||
if (isLibraryModule && await isValidNodeImport(id)) return id;
|
||||
return false;
|
||||
}
|
||||
function matchExternalizePattern(id, moduleDirectories, patterns) {
|
||||
if (patterns == null) return false;
|
||||
if (patterns === true) return true;
|
||||
for (const ex of patterns) if (typeof ex === "string") {
|
||||
if (moduleDirectories.some((dir) => id.includes(pathe.join(dir, ex)))) return true;
|
||||
} else if (ex.test(id)) return true;
|
||||
return false;
|
||||
}
|
||||
function patchWindowsImportPath(path) {
|
||||
if (path.match(/^\w:\\/)) return `file:///${utils.slash(path)}`;
|
||||
else if (path.match(/^\w:\//)) return `file:///${path}`;
|
||||
else return path;
|
||||
}
|
||||
|
||||
const debugRequest = createDebug("vite-node:server:request");
|
||||
class ViteNodeServer {
|
||||
fetchPromiseMap = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
transformPromiseMap = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
durations = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
existingOptimizedDeps = /* @__PURE__ */ new Set();
|
||||
fetchCaches = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
fetchCache = /* @__PURE__ */ new Map();
|
||||
externalizeCache = /* @__PURE__ */ new Map();
|
||||
debugger;
|
||||
constructor(server, options = {}) {
|
||||
var _options$deps3;
|
||||
this.server = server;
|
||||
this.options = options;
|
||||
const ssrOptions = server.config.ssr;
|
||||
options.deps ?? (options.deps = {});
|
||||
options.deps.cacheDir = pathe.relative(server.config.root, options.deps.cacheDir || server.config.cacheDir);
|
||||
if (ssrOptions) {
|
||||
// we don't externalize ssr, because it has different semantics in Vite
|
||||
// if (ssrOptions.external) {
|
||||
// options.deps.external ??= []
|
||||
// options.deps.external.push(...ssrOptions.external)
|
||||
// }
|
||||
if (ssrOptions.noExternal === true) {
|
||||
var _options$deps;
|
||||
(_options$deps = options.deps).inline ?? (_options$deps.inline = true);
|
||||
} else if (options.deps.inline !== true) {
|
||||
var _options$deps2;
|
||||
(_options$deps2 = options.deps).inline ?? (_options$deps2.inline = []);
|
||||
const inline = options.deps.inline;
|
||||
options.deps.inline.push(...utils.toArray(ssrOptions.noExternal).filter((dep) => !inline.includes(dep)));
|
||||
}
|
||||
}
|
||||
if (process.env.VITE_NODE_DEBUG_DUMP) options.debug = Object.assign({
|
||||
dumpModules: !!process.env.VITE_NODE_DEBUG_DUMP,
|
||||
loadDumppedModules: process.env.VITE_NODE_DEBUG_DUMP === "load"
|
||||
}, options.debug ?? {});
|
||||
if (options.debug) this.debugger = new Debugger(server.config.root, options.debug);
|
||||
if (options.deps.inlineFiles) options.deps.inlineFiles = options.deps.inlineFiles.flatMap((file) => {
|
||||
if (file.startsWith("file://")) return file;
|
||||
const resolvedId = pathe.resolve(file);
|
||||
return [resolvedId, node_url.pathToFileURL(resolvedId).href];
|
||||
});
|
||||
(_options$deps3 = options.deps).moduleDirectories ?? (_options$deps3.moduleDirectories = []);
|
||||
const envValue = process.env.VITE_NODE_DEPS_MODULE_DIRECTORIES || process.env.npm_config_VITE_NODE_DEPS_MODULE_DIRECTORIES;
|
||||
const customModuleDirectories = envValue === null || envValue === void 0 ? void 0 : envValue.split(",");
|
||||
if (customModuleDirectories) options.deps.moduleDirectories.push(...customModuleDirectories);
|
||||
options.deps.moduleDirectories = options.deps.moduleDirectories.map((dir) => {
|
||||
if (!dir.startsWith("/")) dir = `/${dir}`;
|
||||
if (!dir.endsWith("/")) dir += "/";
|
||||
return pathe.normalize(dir);
|
||||
});
|
||||
// always add node_modules as a module directory
|
||||
if (!options.deps.moduleDirectories.includes("/node_modules/")) options.deps.moduleDirectories.push("/node_modules/");
|
||||
}
|
||||
shouldExternalize(id) {
|
||||
return shouldExternalize(id, this.options.deps, this.externalizeCache);
|
||||
}
|
||||
getTotalDuration() {
|
||||
const ssrDurations = [...this.durations.ssr.values()].flat();
|
||||
const webDurations = [...this.durations.web.values()].flat();
|
||||
return [...ssrDurations, ...webDurations].reduce((a, b) => a + b, 0);
|
||||
}
|
||||
async ensureExists(id) {
|
||||
if (this.existingOptimizedDeps.has(id)) return true;
|
||||
if (fs.existsSync(id)) {
|
||||
this.existingOptimizedDeps.add(id);
|
||||
return true;
|
||||
}
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(() => {
|
||||
this.ensureExists(id).then(() => {
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
async resolveId(id, importer, transformMode) {
|
||||
if (importer && !importer.startsWith(utils.withTrailingSlash(this.server.config.root))) importer = pathe.resolve(this.server.config.root, importer);
|
||||
const mode = transformMode ?? (importer && this.getTransformMode(importer) || "ssr");
|
||||
return this.server.pluginContainer.resolveId(id, importer, { ssr: mode === "ssr" });
|
||||
}
|
||||
getSourceMap(source) {
|
||||
var _this$fetchCache$get, _this$server$moduleGr;
|
||||
source = utils.normalizeModuleId(source);
|
||||
const fetchResult = (_this$fetchCache$get = this.fetchCache.get(source)) === null || _this$fetchCache$get === void 0 ? void 0 : _this$fetchCache$get.result;
|
||||
if (fetchResult === null || fetchResult === void 0 ? void 0 : fetchResult.map) return fetchResult.map;
|
||||
const ssrTransformResult = (_this$server$moduleGr = this.server.moduleGraph.getModuleById(source)) === null || _this$server$moduleGr === void 0 ? void 0 : _this$server$moduleGr.ssrTransformResult;
|
||||
return (ssrTransformResult === null || ssrTransformResult === void 0 ? void 0 : ssrTransformResult.map) || null;
|
||||
}
|
||||
assertMode(mode) {
|
||||
assert(mode === "web" || mode === "ssr", `"transformMode" can only be "web" or "ssr", received "${mode}".`);
|
||||
}
|
||||
async fetchModule(id, transformMode) {
|
||||
const mode = transformMode || this.getTransformMode(id);
|
||||
return this.fetchResult(id, mode).then((r) => {
|
||||
return this.options.sourcemap !== true ? {
|
||||
...r,
|
||||
map: void 0
|
||||
} : r;
|
||||
});
|
||||
}
|
||||
async fetchResult(id, mode) {
|
||||
const moduleId = utils.normalizeModuleId(id);
|
||||
this.assertMode(mode);
|
||||
const promiseMap = this.fetchPromiseMap[mode];
|
||||
// reuse transform for concurrent requests
|
||||
if (!promiseMap.has(moduleId)) promiseMap.set(moduleId, this._fetchModule(moduleId, mode).finally(() => {
|
||||
promiseMap.delete(moduleId);
|
||||
}));
|
||||
return promiseMap.get(moduleId);
|
||||
}
|
||||
async transformRequest(id, filepath = id, transformMode) {
|
||||
const mode = transformMode || this.getTransformMode(id);
|
||||
this.assertMode(mode);
|
||||
const promiseMap = this.transformPromiseMap[mode];
|
||||
// reuse transform for concurrent requests
|
||||
if (!promiseMap.has(id)) promiseMap.set(id, this._transformRequest(id, filepath, mode).finally(() => {
|
||||
promiseMap.delete(id);
|
||||
}));
|
||||
return promiseMap.get(id);
|
||||
}
|
||||
async transformModule(id, transformMode) {
|
||||
if (transformMode !== "web") throw new Error("`transformModule` only supports `transformMode: \"web\"`.");
|
||||
const normalizedId = utils.normalizeModuleId(id);
|
||||
const mod = this.server.moduleGraph.getModuleById(normalizedId);
|
||||
const result = (mod === null || mod === void 0 ? void 0 : mod.transformResult) || await this.server.transformRequest(normalizedId);
|
||||
return { code: result === null || result === void 0 ? void 0 : result.code };
|
||||
}
|
||||
getTransformMode(id) {
|
||||
var _this$options$transfo, _this$options$transfo2;
|
||||
const withoutQuery = id.split("?")[0];
|
||||
if ((_this$options$transfo = this.options.transformMode) === null || _this$options$transfo === void 0 || (_this$options$transfo = _this$options$transfo.web) === null || _this$options$transfo === void 0 ? void 0 : _this$options$transfo.some((r) => withoutQuery.match(r))) return "web";
|
||||
if ((_this$options$transfo2 = this.options.transformMode) === null || _this$options$transfo2 === void 0 || (_this$options$transfo2 = _this$options$transfo2.ssr) === null || _this$options$transfo2 === void 0 ? void 0 : _this$options$transfo2.some((r) => withoutQuery.match(r))) return "ssr";
|
||||
if (withoutQuery.match(/\.([cm]?[jt]sx?|json)$/)) return "ssr";
|
||||
return "web";
|
||||
}
|
||||
getChangedModule(id, file) {
|
||||
const module = this.server.moduleGraph.getModuleById(id) || this.server.moduleGraph.getModuleById(file);
|
||||
if (module) return module;
|
||||
const _modules = this.server.moduleGraph.getModulesByFile(file);
|
||||
if (!_modules || !_modules.size) return null;
|
||||
// find the latest changed module
|
||||
const modules = [..._modules];
|
||||
let mod = modules[0];
|
||||
let latestMax = -1;
|
||||
for (const m of _modules) {
|
||||
const timestamp = Math.max(m.lastHMRTimestamp, m.lastInvalidationTimestamp);
|
||||
if (timestamp > latestMax) {
|
||||
latestMax = timestamp;
|
||||
mod = m;
|
||||
}
|
||||
}
|
||||
return mod;
|
||||
}
|
||||
async _fetchModule(id, transformMode) {
|
||||
var _this$options$deps;
|
||||
let result;
|
||||
const cacheDir = (_this$options$deps = this.options.deps) === null || _this$options$deps === void 0 ? void 0 : _this$options$deps.cacheDir;
|
||||
if (cacheDir && id.includes(cacheDir)) {
|
||||
if (!id.startsWith(utils.withTrailingSlash(this.server.config.root))) id = pathe.join(this.server.config.root, id);
|
||||
const timeout = setTimeout(() => {
|
||||
throw new Error(`ViteNodeServer: ${id} not found. This is a bug, please report it.`);
|
||||
}, 5e3);
|
||||
await this.ensureExists(id);
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
const { path: filePath } = utils.toFilePath(id, this.server.config.root);
|
||||
const moduleNode = this.getChangedModule(id, filePath);
|
||||
const cache = this.fetchCaches[transformMode].get(filePath);
|
||||
// lastUpdateTimestamp is the timestamp that marks the last time the module was changed
|
||||
// if lastUpdateTimestamp is 0, then the module was not changed since the server started
|
||||
// we test "timestamp === 0" for expressiveness, but it's not necessary
|
||||
const timestamp = moduleNode ? Math.max(moduleNode.lastHMRTimestamp, moduleNode.lastInvalidationTimestamp) : 0;
|
||||
if (cache && (timestamp === 0 || cache.timestamp >= timestamp)) return cache.result;
|
||||
const time = Date.now();
|
||||
const externalize = await this.shouldExternalize(filePath);
|
||||
let duration;
|
||||
if (externalize) {
|
||||
var _this$debugger;
|
||||
result = { externalize };
|
||||
(_this$debugger = this.debugger) === null || _this$debugger === void 0 || _this$debugger.recordExternalize(id, externalize);
|
||||
} else {
|
||||
const start = node_perf_hooks.performance.now();
|
||||
const r = await this._transformRequest(id, filePath, transformMode);
|
||||
duration = node_perf_hooks.performance.now() - start;
|
||||
result = {
|
||||
code: r === null || r === void 0 ? void 0 : r.code,
|
||||
map: r === null || r === void 0 ? void 0 : r.map
|
||||
};
|
||||
}
|
||||
const cacheEntry = {
|
||||
duration,
|
||||
timestamp: time,
|
||||
result
|
||||
};
|
||||
const durations = this.durations[transformMode].get(filePath) || [];
|
||||
this.durations[transformMode].set(filePath, [...durations, duration ?? 0]);
|
||||
this.fetchCaches[transformMode].set(filePath, cacheEntry);
|
||||
this.fetchCache.set(filePath, cacheEntry);
|
||||
return result;
|
||||
}
|
||||
async processTransformResult(filepath, result) {
|
||||
const mod = this.server.moduleGraph.getModuleById(filepath);
|
||||
return sourceMap.withInlineSourcemap(result, {
|
||||
filepath: (mod === null || mod === void 0 ? void 0 : mod.file) || filepath,
|
||||
root: this.server.config.root,
|
||||
noFirstLineMapping: Number(vite.version.split(".")[0]) >= 6
|
||||
});
|
||||
}
|
||||
async _transformRequest(id, filepath, transformMode) {
|
||||
var _this$options$debug, _this$options$debug2;
|
||||
debugRequest(id);
|
||||
let result = null;
|
||||
if ((_this$options$debug = this.options.debug) === null || _this$options$debug === void 0 ? void 0 : _this$options$debug.loadDumppedModules) {
|
||||
var _this$debugger2;
|
||||
result = await ((_this$debugger2 = this.debugger) === null || _this$debugger2 === void 0 ? void 0 : _this$debugger2.loadDump(id)) ?? null;
|
||||
if (result) return result;
|
||||
}
|
||||
if (transformMode === "web") {
|
||||
// for components like Vue, we want to use the client side
|
||||
// plugins but then convert the code to be consumed by the server
|
||||
result = await this.server.transformRequest(id);
|
||||
if (result) result = await this.server.ssrTransform(result.code, result.map, id);
|
||||
} else result = await this.server.transformRequest(id, { ssr: true });
|
||||
const sourcemap = this.options.sourcemap ?? "inline";
|
||||
if (sourcemap === "inline" && result) result = await this.processTransformResult(filepath, result);
|
||||
if ((_this$options$debug2 = this.options.debug) === null || _this$options$debug2 === void 0 ? void 0 : _this$options$debug2.dumpModules) {
|
||||
var _this$debugger3;
|
||||
await ((_this$debugger3 = this.debugger) === null || _this$debugger3 === void 0 ? void 0 : _this$debugger3.dumpFile(id, result));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
exports.ViteNodeServer = ViteNodeServer;
|
||||
exports.guessCJSversion = guessCJSversion;
|
||||
exports.shouldExternalize = shouldExternalize;
|
||||
58
GTA_P_V2/node_modules/vite-node/dist/server.d.ts
generated
vendored
Normal file
58
GTA_P_V2/node_modules/vite-node/dist/server.d.ts
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
import { TransformResult, ViteDevServer } from 'vite';
|
||||
import { D as DebuggerOptions, c as DepsHandlingOptions, V as ViteNodeServerOptions, F as FetchResult, d as ViteNodeResolveId } from './index.d-DGmxD2U7.js';
|
||||
import { E as EncodedSourceMap } from './trace-mapping.d-DLVdEqOp.js';
|
||||
|
||||
declare class Debugger {
|
||||
options: DebuggerOptions;
|
||||
dumpDir: string | undefined;
|
||||
initPromise: Promise<void> | undefined;
|
||||
externalizeMap: Map<string, string>;
|
||||
constructor(root: string, options: DebuggerOptions);
|
||||
clearDump(): Promise<void>;
|
||||
encodeId(id: string): string;
|
||||
recordExternalize(id: string, path: string): Promise<void>;
|
||||
dumpFile(id: string, result: TransformResult | null): Promise<void>;
|
||||
loadDump(id: string): Promise<TransformResult | null>;
|
||||
writeInfo(): Promise<void>;
|
||||
}
|
||||
|
||||
declare function guessCJSversion(id: string): string | undefined;
|
||||
declare function shouldExternalize(id: string, options?: DepsHandlingOptions, cache?: Map<string, Promise<string | false>>): Promise<string | false>;
|
||||
|
||||
interface FetchCache {
|
||||
duration?: number;
|
||||
timestamp: number;
|
||||
result: FetchResult;
|
||||
}
|
||||
declare class ViteNodeServer {
|
||||
server: ViteDevServer;
|
||||
options: ViteNodeServerOptions;
|
||||
private fetchPromiseMap;
|
||||
private transformPromiseMap;
|
||||
private durations;
|
||||
private existingOptimizedDeps;
|
||||
fetchCaches: Record<"ssr" | "web", Map<string, FetchCache>>;
|
||||
fetchCache: Map<string, FetchCache>;
|
||||
externalizeCache: Map<string, Promise<string | false>>;
|
||||
debugger?: Debugger;
|
||||
constructor(server: ViteDevServer, options?: ViteNodeServerOptions);
|
||||
shouldExternalize(id: string): Promise<string | false>;
|
||||
getTotalDuration(): number;
|
||||
private ensureExists;
|
||||
resolveId(id: string, importer?: string, transformMode?: "web" | "ssr"): Promise<ViteNodeResolveId | null>;
|
||||
getSourceMap(source: string): EncodedSourceMap | null;
|
||||
private assertMode;
|
||||
fetchModule(id: string, transformMode?: "web" | "ssr"): Promise<FetchResult>;
|
||||
fetchResult(id: string, mode: "web" | "ssr"): Promise<FetchResult>;
|
||||
transformRequest(id: string, filepath?: string, transformMode?: "web" | "ssr"): Promise<TransformResult | null | undefined>;
|
||||
transformModule(id: string, transformMode?: "web" | "ssr"): Promise<{
|
||||
code: string | undefined
|
||||
}>;
|
||||
getTransformMode(id: string): "ssr" | "web";
|
||||
private getChangedModule;
|
||||
private _fetchModule;
|
||||
protected processTransformResult(filepath: string, result: TransformResult): Promise<TransformResult>;
|
||||
private _transformRequest;
|
||||
}
|
||||
|
||||
export { ViteNodeServer, guessCJSversion, shouldExternalize };
|
||||
417
GTA_P_V2/node_modules/vite-node/dist/server.mjs
generated
vendored
Normal file
417
GTA_P_V2/node_modules/vite-node/dist/server.mjs
generated
vendored
Normal file
@@ -0,0 +1,417 @@
|
||||
import assert from 'node:assert';
|
||||
import { existsSync, promises } from 'node:fs';
|
||||
import { performance } from 'node:perf_hooks';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
import createDebug from 'debug';
|
||||
import { resolve, join, extname, dirname, relative, normalize } from 'pathe';
|
||||
import { version } from 'vite';
|
||||
import { s } from './chunk-browser.mjs';
|
||||
import * as esModuleLexer from 'es-module-lexer';
|
||||
import { KNOWN_ASSET_RE } from './constants.mjs';
|
||||
import { isNodeBuiltin, slash, findNearestPackageData, toArray, withTrailingSlash, normalizeModuleId, toFilePath } from './utils.mjs';
|
||||
import { withInlineSourcemap } from './source-map.mjs';
|
||||
import 'node:module';
|
||||
import 'node:path';
|
||||
|
||||
/* eslint-disable no-console */
|
||||
function hashCode(s) {
|
||||
return s.split("").reduce((a, b) => {
|
||||
a = (a << 5) - a + b.charCodeAt(0);
|
||||
return a & a;
|
||||
}, 0);
|
||||
}
|
||||
class Debugger {
|
||||
dumpDir;
|
||||
initPromise;
|
||||
externalizeMap = /* @__PURE__ */ new Map();
|
||||
constructor(root, options) {
|
||||
this.options = options;
|
||||
if (options.dumpModules) this.dumpDir = resolve(root, options.dumpModules === true ? ".vite-node/dump" : options.dumpModules);
|
||||
if (this.dumpDir) if (options.loadDumppedModules) console.info(s.gray(`[vite-node] [debug] load modules from ${this.dumpDir}`));
|
||||
else console.info(s.gray(`[vite-node] [debug] dump modules to ${this.dumpDir}`));
|
||||
this.initPromise = this.clearDump();
|
||||
}
|
||||
async clearDump() {
|
||||
if (!this.dumpDir) return;
|
||||
if (!this.options.loadDumppedModules && existsSync(this.dumpDir)) await promises.rm(this.dumpDir, {
|
||||
recursive: true,
|
||||
force: true
|
||||
});
|
||||
await promises.mkdir(this.dumpDir, { recursive: true });
|
||||
}
|
||||
encodeId(id) {
|
||||
return `${id.replace(/[^\w@\-]/g, "_").replace(/_+/g, "_")}-${hashCode(id)}.js`;
|
||||
}
|
||||
async recordExternalize(id, path) {
|
||||
if (!this.dumpDir) return;
|
||||
this.externalizeMap.set(id, path);
|
||||
await this.writeInfo();
|
||||
}
|
||||
async dumpFile(id, result) {
|
||||
if (!result || !this.dumpDir) return;
|
||||
await this.initPromise;
|
||||
const name = this.encodeId(id);
|
||||
return await promises.writeFile(join(this.dumpDir, name), `// ${id.replace(/\0/g, "\\0")}\n${result.code}`, "utf-8");
|
||||
}
|
||||
async loadDump(id) {
|
||||
if (!this.dumpDir) return null;
|
||||
await this.initPromise;
|
||||
const name = this.encodeId(id);
|
||||
const path = join(this.dumpDir, name);
|
||||
if (!existsSync(path)) return null;
|
||||
const code = await promises.readFile(path, "utf-8");
|
||||
return {
|
||||
code: code.replace(/^\/\/.*\n/, ""),
|
||||
map: void 0
|
||||
};
|
||||
}
|
||||
async writeInfo() {
|
||||
if (!this.dumpDir) return;
|
||||
const info = JSON.stringify({
|
||||
time: (/* @__PURE__ */ new Date()).toLocaleString(),
|
||||
externalize: Object.fromEntries(this.externalizeMap.entries())
|
||||
}, null, 2);
|
||||
return promises.writeFile(join(this.dumpDir, "info.json"), info, "utf-8");
|
||||
}
|
||||
}
|
||||
|
||||
const BUILTIN_EXTENSIONS = new Set([
|
||||
".mjs",
|
||||
".cjs",
|
||||
".node",
|
||||
".wasm"
|
||||
]);
|
||||
const ESM_EXT_RE = /\.(es|esm|esm-browser|esm-bundler|es6|module)\.js$/;
|
||||
const ESM_FOLDER_RE = /\/(es|esm)\/(.*\.js)$/;
|
||||
const defaultInline = [
|
||||
/virtual:/,
|
||||
/\.[mc]?ts$/,
|
||||
/[?&](init|raw|url|inline)\b/,
|
||||
KNOWN_ASSET_RE
|
||||
];
|
||||
const depsExternal = [/\/node_modules\/.*\.cjs\.js$/, /\/node_modules\/.*\.mjs$/];
|
||||
function guessCJSversion(id) {
|
||||
if (id.match(ESM_EXT_RE)) {
|
||||
for (const i of [
|
||||
id.replace(ESM_EXT_RE, ".mjs"),
|
||||
id.replace(ESM_EXT_RE, ".umd.js"),
|
||||
id.replace(ESM_EXT_RE, ".cjs.js"),
|
||||
id.replace(ESM_EXT_RE, ".js")
|
||||
]) if (existsSync(i)) return i;
|
||||
}
|
||||
if (id.match(ESM_FOLDER_RE)) {
|
||||
for (const i of [
|
||||
id.replace(ESM_FOLDER_RE, "/umd/$1"),
|
||||
id.replace(ESM_FOLDER_RE, "/cjs/$1"),
|
||||
id.replace(ESM_FOLDER_RE, "/lib/$1"),
|
||||
id.replace(ESM_FOLDER_RE, "/$1")
|
||||
]) if (existsSync(i)) return i;
|
||||
}
|
||||
}
|
||||
// The code from https://github.com/unjs/mlly/blob/c5bcca0cda175921344fd6de1bc0c499e73e5dac/src/syntax.ts#L51-L98
|
||||
async function isValidNodeImport(id) {
|
||||
const extension = extname(id);
|
||||
if (BUILTIN_EXTENSIONS.has(extension)) return true;
|
||||
if (extension !== ".js") return false;
|
||||
id = id.replace("file:///", "");
|
||||
const package_ = await findNearestPackageData(dirname(id));
|
||||
if (package_.type === "module") return true;
|
||||
if (/\.(?:\w+-)?esm?(?:-\w+)?\.js$|\/esm?\//.test(id)) return false;
|
||||
try {
|
||||
await esModuleLexer.init;
|
||||
const code = await promises.readFile(id, "utf8");
|
||||
const [, , , hasModuleSyntax] = esModuleLexer.parse(code);
|
||||
return !hasModuleSyntax;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const _defaultExternalizeCache = /* @__PURE__ */ new Map();
|
||||
async function shouldExternalize(id, options, cache = _defaultExternalizeCache) {
|
||||
if (!cache.has(id)) cache.set(id, _shouldExternalize(id, options));
|
||||
return cache.get(id);
|
||||
}
|
||||
async function _shouldExternalize(id, options) {
|
||||
if (isNodeBuiltin(id)) return id;
|
||||
// data: should be processed by native import,
|
||||
// since it is a feature of ESM.
|
||||
// also externalize network imports since nodejs allows it when --experimental-network-imports
|
||||
if (id.startsWith("data:") || /^(?:https?:)?\/\//.test(id)) return id;
|
||||
id = patchWindowsImportPath(id);
|
||||
const moduleDirectories = (options === null || options === void 0 ? void 0 : options.moduleDirectories) || ["/node_modules/"];
|
||||
if (matchExternalizePattern(id, moduleDirectories, options === null || options === void 0 ? void 0 : options.inline)) return false;
|
||||
if ((options === null || options === void 0 ? void 0 : options.inlineFiles) && (options === null || options === void 0 ? void 0 : options.inlineFiles.includes(id))) return false;
|
||||
if (matchExternalizePattern(id, moduleDirectories, options === null || options === void 0 ? void 0 : options.external)) return id;
|
||||
// Unless the user explicitly opted to inline them, externalize Vite deps.
|
||||
// They are too big to inline by default.
|
||||
if ((options === null || options === void 0 ? void 0 : options.cacheDir) && id.includes(options.cacheDir)) return id;
|
||||
const isLibraryModule = moduleDirectories.some((dir) => id.includes(dir));
|
||||
const guessCJS = isLibraryModule && (options === null || options === void 0 ? void 0 : options.fallbackCJS);
|
||||
id = guessCJS ? guessCJSversion(id) || id : id;
|
||||
if (matchExternalizePattern(id, moduleDirectories, defaultInline)) return false;
|
||||
if (matchExternalizePattern(id, moduleDirectories, depsExternal)) return id;
|
||||
if (isLibraryModule && await isValidNodeImport(id)) return id;
|
||||
return false;
|
||||
}
|
||||
function matchExternalizePattern(id, moduleDirectories, patterns) {
|
||||
if (patterns == null) return false;
|
||||
if (patterns === true) return true;
|
||||
for (const ex of patterns) if (typeof ex === "string") {
|
||||
if (moduleDirectories.some((dir) => id.includes(join(dir, ex)))) return true;
|
||||
} else if (ex.test(id)) return true;
|
||||
return false;
|
||||
}
|
||||
function patchWindowsImportPath(path) {
|
||||
if (path.match(/^\w:\\/)) return `file:///${slash(path)}`;
|
||||
else if (path.match(/^\w:\//)) return `file:///${path}`;
|
||||
else return path;
|
||||
}
|
||||
|
||||
const debugRequest = createDebug("vite-node:server:request");
|
||||
class ViteNodeServer {
|
||||
fetchPromiseMap = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
transformPromiseMap = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
durations = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
existingOptimizedDeps = /* @__PURE__ */ new Set();
|
||||
fetchCaches = {
|
||||
ssr: /* @__PURE__ */ new Map(),
|
||||
web: /* @__PURE__ */ new Map()
|
||||
};
|
||||
fetchCache = /* @__PURE__ */ new Map();
|
||||
externalizeCache = /* @__PURE__ */ new Map();
|
||||
debugger;
|
||||
constructor(server, options = {}) {
|
||||
var _options$deps3;
|
||||
this.server = server;
|
||||
this.options = options;
|
||||
const ssrOptions = server.config.ssr;
|
||||
options.deps ?? (options.deps = {});
|
||||
options.deps.cacheDir = relative(server.config.root, options.deps.cacheDir || server.config.cacheDir);
|
||||
if (ssrOptions) {
|
||||
// we don't externalize ssr, because it has different semantics in Vite
|
||||
// if (ssrOptions.external) {
|
||||
// options.deps.external ??= []
|
||||
// options.deps.external.push(...ssrOptions.external)
|
||||
// }
|
||||
if (ssrOptions.noExternal === true) {
|
||||
var _options$deps;
|
||||
(_options$deps = options.deps).inline ?? (_options$deps.inline = true);
|
||||
} else if (options.deps.inline !== true) {
|
||||
var _options$deps2;
|
||||
(_options$deps2 = options.deps).inline ?? (_options$deps2.inline = []);
|
||||
const inline = options.deps.inline;
|
||||
options.deps.inline.push(...toArray(ssrOptions.noExternal).filter((dep) => !inline.includes(dep)));
|
||||
}
|
||||
}
|
||||
if (process.env.VITE_NODE_DEBUG_DUMP) options.debug = Object.assign({
|
||||
dumpModules: !!process.env.VITE_NODE_DEBUG_DUMP,
|
||||
loadDumppedModules: process.env.VITE_NODE_DEBUG_DUMP === "load"
|
||||
}, options.debug ?? {});
|
||||
if (options.debug) this.debugger = new Debugger(server.config.root, options.debug);
|
||||
if (options.deps.inlineFiles) options.deps.inlineFiles = options.deps.inlineFiles.flatMap((file) => {
|
||||
if (file.startsWith("file://")) return file;
|
||||
const resolvedId = resolve(file);
|
||||
return [resolvedId, pathToFileURL(resolvedId).href];
|
||||
});
|
||||
(_options$deps3 = options.deps).moduleDirectories ?? (_options$deps3.moduleDirectories = []);
|
||||
const envValue = process.env.VITE_NODE_DEPS_MODULE_DIRECTORIES || process.env.npm_config_VITE_NODE_DEPS_MODULE_DIRECTORIES;
|
||||
const customModuleDirectories = envValue === null || envValue === void 0 ? void 0 : envValue.split(",");
|
||||
if (customModuleDirectories) options.deps.moduleDirectories.push(...customModuleDirectories);
|
||||
options.deps.moduleDirectories = options.deps.moduleDirectories.map((dir) => {
|
||||
if (!dir.startsWith("/")) dir = `/${dir}`;
|
||||
if (!dir.endsWith("/")) dir += "/";
|
||||
return normalize(dir);
|
||||
});
|
||||
// always add node_modules as a module directory
|
||||
if (!options.deps.moduleDirectories.includes("/node_modules/")) options.deps.moduleDirectories.push("/node_modules/");
|
||||
}
|
||||
shouldExternalize(id) {
|
||||
return shouldExternalize(id, this.options.deps, this.externalizeCache);
|
||||
}
|
||||
getTotalDuration() {
|
||||
const ssrDurations = [...this.durations.ssr.values()].flat();
|
||||
const webDurations = [...this.durations.web.values()].flat();
|
||||
return [...ssrDurations, ...webDurations].reduce((a, b) => a + b, 0);
|
||||
}
|
||||
async ensureExists(id) {
|
||||
if (this.existingOptimizedDeps.has(id)) return true;
|
||||
if (existsSync(id)) {
|
||||
this.existingOptimizedDeps.add(id);
|
||||
return true;
|
||||
}
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(() => {
|
||||
this.ensureExists(id).then(() => {
|
||||
resolve(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
async resolveId(id, importer, transformMode) {
|
||||
if (importer && !importer.startsWith(withTrailingSlash(this.server.config.root))) importer = resolve(this.server.config.root, importer);
|
||||
const mode = transformMode ?? (importer && this.getTransformMode(importer) || "ssr");
|
||||
return this.server.pluginContainer.resolveId(id, importer, { ssr: mode === "ssr" });
|
||||
}
|
||||
getSourceMap(source) {
|
||||
var _this$fetchCache$get, _this$server$moduleGr;
|
||||
source = normalizeModuleId(source);
|
||||
const fetchResult = (_this$fetchCache$get = this.fetchCache.get(source)) === null || _this$fetchCache$get === void 0 ? void 0 : _this$fetchCache$get.result;
|
||||
if (fetchResult === null || fetchResult === void 0 ? void 0 : fetchResult.map) return fetchResult.map;
|
||||
const ssrTransformResult = (_this$server$moduleGr = this.server.moduleGraph.getModuleById(source)) === null || _this$server$moduleGr === void 0 ? void 0 : _this$server$moduleGr.ssrTransformResult;
|
||||
return (ssrTransformResult === null || ssrTransformResult === void 0 ? void 0 : ssrTransformResult.map) || null;
|
||||
}
|
||||
assertMode(mode) {
|
||||
assert(mode === "web" || mode === "ssr", `"transformMode" can only be "web" or "ssr", received "${mode}".`);
|
||||
}
|
||||
async fetchModule(id, transformMode) {
|
||||
const mode = transformMode || this.getTransformMode(id);
|
||||
return this.fetchResult(id, mode).then((r) => {
|
||||
return this.options.sourcemap !== true ? {
|
||||
...r,
|
||||
map: void 0
|
||||
} : r;
|
||||
});
|
||||
}
|
||||
async fetchResult(id, mode) {
|
||||
const moduleId = normalizeModuleId(id);
|
||||
this.assertMode(mode);
|
||||
const promiseMap = this.fetchPromiseMap[mode];
|
||||
// reuse transform for concurrent requests
|
||||
if (!promiseMap.has(moduleId)) promiseMap.set(moduleId, this._fetchModule(moduleId, mode).finally(() => {
|
||||
promiseMap.delete(moduleId);
|
||||
}));
|
||||
return promiseMap.get(moduleId);
|
||||
}
|
||||
async transformRequest(id, filepath = id, transformMode) {
|
||||
const mode = transformMode || this.getTransformMode(id);
|
||||
this.assertMode(mode);
|
||||
const promiseMap = this.transformPromiseMap[mode];
|
||||
// reuse transform for concurrent requests
|
||||
if (!promiseMap.has(id)) promiseMap.set(id, this._transformRequest(id, filepath, mode).finally(() => {
|
||||
promiseMap.delete(id);
|
||||
}));
|
||||
return promiseMap.get(id);
|
||||
}
|
||||
async transformModule(id, transformMode) {
|
||||
if (transformMode !== "web") throw new Error("`transformModule` only supports `transformMode: \"web\"`.");
|
||||
const normalizedId = normalizeModuleId(id);
|
||||
const mod = this.server.moduleGraph.getModuleById(normalizedId);
|
||||
const result = (mod === null || mod === void 0 ? void 0 : mod.transformResult) || await this.server.transformRequest(normalizedId);
|
||||
return { code: result === null || result === void 0 ? void 0 : result.code };
|
||||
}
|
||||
getTransformMode(id) {
|
||||
var _this$options$transfo, _this$options$transfo2;
|
||||
const withoutQuery = id.split("?")[0];
|
||||
if ((_this$options$transfo = this.options.transformMode) === null || _this$options$transfo === void 0 || (_this$options$transfo = _this$options$transfo.web) === null || _this$options$transfo === void 0 ? void 0 : _this$options$transfo.some((r) => withoutQuery.match(r))) return "web";
|
||||
if ((_this$options$transfo2 = this.options.transformMode) === null || _this$options$transfo2 === void 0 || (_this$options$transfo2 = _this$options$transfo2.ssr) === null || _this$options$transfo2 === void 0 ? void 0 : _this$options$transfo2.some((r) => withoutQuery.match(r))) return "ssr";
|
||||
if (withoutQuery.match(/\.([cm]?[jt]sx?|json)$/)) return "ssr";
|
||||
return "web";
|
||||
}
|
||||
getChangedModule(id, file) {
|
||||
const module = this.server.moduleGraph.getModuleById(id) || this.server.moduleGraph.getModuleById(file);
|
||||
if (module) return module;
|
||||
const _modules = this.server.moduleGraph.getModulesByFile(file);
|
||||
if (!_modules || !_modules.size) return null;
|
||||
// find the latest changed module
|
||||
const modules = [..._modules];
|
||||
let mod = modules[0];
|
||||
let latestMax = -1;
|
||||
for (const m of _modules) {
|
||||
const timestamp = Math.max(m.lastHMRTimestamp, m.lastInvalidationTimestamp);
|
||||
if (timestamp > latestMax) {
|
||||
latestMax = timestamp;
|
||||
mod = m;
|
||||
}
|
||||
}
|
||||
return mod;
|
||||
}
|
||||
async _fetchModule(id, transformMode) {
|
||||
var _this$options$deps;
|
||||
let result;
|
||||
const cacheDir = (_this$options$deps = this.options.deps) === null || _this$options$deps === void 0 ? void 0 : _this$options$deps.cacheDir;
|
||||
if (cacheDir && id.includes(cacheDir)) {
|
||||
if (!id.startsWith(withTrailingSlash(this.server.config.root))) id = join(this.server.config.root, id);
|
||||
const timeout = setTimeout(() => {
|
||||
throw new Error(`ViteNodeServer: ${id} not found. This is a bug, please report it.`);
|
||||
}, 5e3);
|
||||
await this.ensureExists(id);
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
const { path: filePath } = toFilePath(id, this.server.config.root);
|
||||
const moduleNode = this.getChangedModule(id, filePath);
|
||||
const cache = this.fetchCaches[transformMode].get(filePath);
|
||||
// lastUpdateTimestamp is the timestamp that marks the last time the module was changed
|
||||
// if lastUpdateTimestamp is 0, then the module was not changed since the server started
|
||||
// we test "timestamp === 0" for expressiveness, but it's not necessary
|
||||
const timestamp = moduleNode ? Math.max(moduleNode.lastHMRTimestamp, moduleNode.lastInvalidationTimestamp) : 0;
|
||||
if (cache && (timestamp === 0 || cache.timestamp >= timestamp)) return cache.result;
|
||||
const time = Date.now();
|
||||
const externalize = await this.shouldExternalize(filePath);
|
||||
let duration;
|
||||
if (externalize) {
|
||||
var _this$debugger;
|
||||
result = { externalize };
|
||||
(_this$debugger = this.debugger) === null || _this$debugger === void 0 || _this$debugger.recordExternalize(id, externalize);
|
||||
} else {
|
||||
const start = performance.now();
|
||||
const r = await this._transformRequest(id, filePath, transformMode);
|
||||
duration = performance.now() - start;
|
||||
result = {
|
||||
code: r === null || r === void 0 ? void 0 : r.code,
|
||||
map: r === null || r === void 0 ? void 0 : r.map
|
||||
};
|
||||
}
|
||||
const cacheEntry = {
|
||||
duration,
|
||||
timestamp: time,
|
||||
result
|
||||
};
|
||||
const durations = this.durations[transformMode].get(filePath) || [];
|
||||
this.durations[transformMode].set(filePath, [...durations, duration ?? 0]);
|
||||
this.fetchCaches[transformMode].set(filePath, cacheEntry);
|
||||
this.fetchCache.set(filePath, cacheEntry);
|
||||
return result;
|
||||
}
|
||||
async processTransformResult(filepath, result) {
|
||||
const mod = this.server.moduleGraph.getModuleById(filepath);
|
||||
return withInlineSourcemap(result, {
|
||||
filepath: (mod === null || mod === void 0 ? void 0 : mod.file) || filepath,
|
||||
root: this.server.config.root,
|
||||
noFirstLineMapping: Number(version.split(".")[0]) >= 6
|
||||
});
|
||||
}
|
||||
async _transformRequest(id, filepath, transformMode) {
|
||||
var _this$options$debug, _this$options$debug2;
|
||||
debugRequest(id);
|
||||
let result = null;
|
||||
if ((_this$options$debug = this.options.debug) === null || _this$options$debug === void 0 ? void 0 : _this$options$debug.loadDumppedModules) {
|
||||
var _this$debugger2;
|
||||
result = await ((_this$debugger2 = this.debugger) === null || _this$debugger2 === void 0 ? void 0 : _this$debugger2.loadDump(id)) ?? null;
|
||||
if (result) return result;
|
||||
}
|
||||
if (transformMode === "web") {
|
||||
// for components like Vue, we want to use the client side
|
||||
// plugins but then convert the code to be consumed by the server
|
||||
result = await this.server.transformRequest(id);
|
||||
if (result) result = await this.server.ssrTransform(result.code, result.map, id);
|
||||
} else result = await this.server.transformRequest(id, { ssr: true });
|
||||
const sourcemap = this.options.sourcemap ?? "inline";
|
||||
if (sourcemap === "inline" && result) result = await this.processTransformResult(filepath, result);
|
||||
if ((_this$options$debug2 = this.options.debug) === null || _this$options$debug2 === void 0 ? void 0 : _this$options$debug2.dumpModules) {
|
||||
var _this$debugger3;
|
||||
await ((_this$debugger3 = this.debugger) === null || _this$debugger3 === void 0 ? void 0 : _this$debugger3.dumpFile(id, result));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
export { ViteNodeServer, guessCJSversion, shouldExternalize };
|
||||
977
GTA_P_V2/node_modules/vite-node/dist/source-map.cjs
generated
vendored
Normal file
977
GTA_P_V2/node_modules/vite-node/dist/source-map.cjs
generated
vendored
Normal file
@@ -0,0 +1,977 @@
|
||||
'use strict';
|
||||
|
||||
var pathe = require('pathe');
|
||||
var fs = require('node:fs');
|
||||
var path = require('node:path');
|
||||
var utils = require('./utils.cjs');
|
||||
require('node:module');
|
||||
require('node:url');
|
||||
|
||||
const comma = ','.charCodeAt(0);
|
||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
||||
for (let i = 0; i < chars.length; i++) {
|
||||
const c = chars.charCodeAt(i);
|
||||
intToChar[i] = c;
|
||||
charToInt[c] = i;
|
||||
}
|
||||
function decodeInteger(reader, relative) {
|
||||
let value = 0;
|
||||
let shift = 0;
|
||||
let integer = 0;
|
||||
do {
|
||||
const c = reader.next();
|
||||
integer = charToInt[c];
|
||||
value |= (integer & 31) << shift;
|
||||
shift += 5;
|
||||
} while (integer & 32);
|
||||
const shouldNegate = value & 1;
|
||||
value >>>= 1;
|
||||
if (shouldNegate) {
|
||||
value = -2147483648 | -value;
|
||||
}
|
||||
return relative + value;
|
||||
}
|
||||
function hasMoreVlq(reader, max) {
|
||||
if (reader.pos >= max)
|
||||
return false;
|
||||
return reader.peek() !== comma;
|
||||
}
|
||||
class StringReader {
|
||||
constructor(buffer) {
|
||||
this.pos = 0;
|
||||
this.buffer = buffer;
|
||||
}
|
||||
next() {
|
||||
return this.buffer.charCodeAt(this.pos++);
|
||||
}
|
||||
peek() {
|
||||
return this.buffer.charCodeAt(this.pos);
|
||||
}
|
||||
indexOf(char) {
|
||||
const { buffer, pos } = this;
|
||||
const idx = buffer.indexOf(char, pos);
|
||||
return idx === -1 ? buffer.length : idx;
|
||||
}
|
||||
}
|
||||
|
||||
function decode(mappings) {
|
||||
const { length } = mappings;
|
||||
const reader = new StringReader(mappings);
|
||||
const decoded = [];
|
||||
let genColumn = 0;
|
||||
let sourcesIndex = 0;
|
||||
let sourceLine = 0;
|
||||
let sourceColumn = 0;
|
||||
let namesIndex = 0;
|
||||
do {
|
||||
const semi = reader.indexOf(';');
|
||||
const line = [];
|
||||
let sorted = true;
|
||||
let lastCol = 0;
|
||||
genColumn = 0;
|
||||
while (reader.pos < semi) {
|
||||
let seg;
|
||||
genColumn = decodeInteger(reader, genColumn);
|
||||
if (genColumn < lastCol)
|
||||
sorted = false;
|
||||
lastCol = genColumn;
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
||||
sourceLine = decodeInteger(reader, sourceLine);
|
||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
||||
if (hasMoreVlq(reader, semi)) {
|
||||
namesIndex = decodeInteger(reader, namesIndex);
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
||||
}
|
||||
else {
|
||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
||||
}
|
||||
}
|
||||
else {
|
||||
seg = [genColumn];
|
||||
}
|
||||
line.push(seg);
|
||||
reader.pos++;
|
||||
}
|
||||
if (!sorted)
|
||||
sort(line);
|
||||
decoded.push(line);
|
||||
reader.pos = semi + 1;
|
||||
} while (reader.pos <= length);
|
||||
return decoded;
|
||||
}
|
||||
function sort(line) {
|
||||
line.sort(sortComparator$1);
|
||||
}
|
||||
function sortComparator$1(a, b) {
|
||||
return a[0] - b[0];
|
||||
}
|
||||
|
||||
// Matches the scheme of a URL, eg "http://"
|
||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
||||
/**
|
||||
* Matches the parts of a URL:
|
||||
* 1. Scheme, including ":", guaranteed.
|
||||
* 2. User/password, including "@", optional.
|
||||
* 3. Host, guaranteed.
|
||||
* 4. Port, including ":", optional.
|
||||
* 5. Path, including "/", optional.
|
||||
* 6. Query, including "?", optional.
|
||||
* 7. Hash, including "#", optional.
|
||||
*/
|
||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
||||
/**
|
||||
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
||||
*
|
||||
* 1. Host, optional.
|
||||
* 2. Path, which may include "/", guaranteed.
|
||||
* 3. Query, including "?", optional.
|
||||
* 4. Hash, including "#", optional.
|
||||
*/
|
||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
||||
var UrlType;
|
||||
(function (UrlType) {
|
||||
UrlType[UrlType["Empty"] = 1] = "Empty";
|
||||
UrlType[UrlType["Hash"] = 2] = "Hash";
|
||||
UrlType[UrlType["Query"] = 3] = "Query";
|
||||
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
|
||||
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
|
||||
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
|
||||
UrlType[UrlType["Absolute"] = 7] = "Absolute";
|
||||
})(UrlType || (UrlType = {}));
|
||||
function isAbsoluteUrl(input) {
|
||||
return schemeRegex.test(input);
|
||||
}
|
||||
function isSchemeRelativeUrl(input) {
|
||||
return input.startsWith('//');
|
||||
}
|
||||
function isAbsolutePath(input) {
|
||||
return input.startsWith('/');
|
||||
}
|
||||
function isFileUrl(input) {
|
||||
return input.startsWith('file:');
|
||||
}
|
||||
function isRelative(input) {
|
||||
return /^[.?#]/.test(input);
|
||||
}
|
||||
function parseAbsoluteUrl(input) {
|
||||
const match = urlRegex.exec(input);
|
||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
||||
}
|
||||
function parseFileUrl(input) {
|
||||
const match = fileRegex.exec(input);
|
||||
const path = match[2];
|
||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
||||
}
|
||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
||||
return {
|
||||
scheme,
|
||||
user,
|
||||
host,
|
||||
port,
|
||||
path,
|
||||
query,
|
||||
hash,
|
||||
type: UrlType.Absolute,
|
||||
};
|
||||
}
|
||||
function parseUrl(input) {
|
||||
if (isSchemeRelativeUrl(input)) {
|
||||
const url = parseAbsoluteUrl('http:' + input);
|
||||
url.scheme = '';
|
||||
url.type = UrlType.SchemeRelative;
|
||||
return url;
|
||||
}
|
||||
if (isAbsolutePath(input)) {
|
||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = UrlType.AbsolutePath;
|
||||
return url;
|
||||
}
|
||||
if (isFileUrl(input))
|
||||
return parseFileUrl(input);
|
||||
if (isAbsoluteUrl(input))
|
||||
return parseAbsoluteUrl(input);
|
||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
||||
url.scheme = '';
|
||||
url.host = '';
|
||||
url.type = input
|
||||
? input.startsWith('?')
|
||||
? UrlType.Query
|
||||
: input.startsWith('#')
|
||||
? UrlType.Hash
|
||||
: UrlType.RelativePath
|
||||
: UrlType.Empty;
|
||||
return url;
|
||||
}
|
||||
function stripPathFilename(path) {
|
||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
||||
// paths. It's not a file, so we can't strip it.
|
||||
if (path.endsWith('/..'))
|
||||
return path;
|
||||
const index = path.lastIndexOf('/');
|
||||
return path.slice(0, index + 1);
|
||||
}
|
||||
function mergePaths(url, base) {
|
||||
normalizePath(base, base.type);
|
||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
||||
// path).
|
||||
if (url.path === '/') {
|
||||
url.path = base.path;
|
||||
}
|
||||
else {
|
||||
// Resolution happens relative to the base path's directory, not the file.
|
||||
url.path = stripPathFilename(base.path) + url.path;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
||||
* "foo/.". We need to normalize to a standard representation.
|
||||
*/
|
||||
function normalizePath(url, type) {
|
||||
const rel = type <= UrlType.RelativePath;
|
||||
const pieces = url.path.split('/');
|
||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
||||
// pieces[0] is an empty string.
|
||||
let pointer = 1;
|
||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
||||
let positive = 0;
|
||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
||||
// real directory, we won't need to append, unless the other conditions happen again.
|
||||
let addTrailingSlash = false;
|
||||
for (let i = 1; i < pieces.length; i++) {
|
||||
const piece = pieces[i];
|
||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
||||
if (!piece) {
|
||||
addTrailingSlash = true;
|
||||
continue;
|
||||
}
|
||||
// If we encounter a real directory, then we don't need to append anymore.
|
||||
addTrailingSlash = false;
|
||||
// A current directory, which we can always drop.
|
||||
if (piece === '.')
|
||||
continue;
|
||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
||||
// have an excess of parents, and we'll need to keep the "..".
|
||||
if (piece === '..') {
|
||||
if (positive) {
|
||||
addTrailingSlash = true;
|
||||
positive--;
|
||||
pointer--;
|
||||
}
|
||||
else if (rel) {
|
||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
||||
pieces[pointer++] = piece;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
||||
// any popped or dropped directories.
|
||||
pieces[pointer++] = piece;
|
||||
positive++;
|
||||
}
|
||||
let path = '';
|
||||
for (let i = 1; i < pointer; i++) {
|
||||
path += '/' + pieces[i];
|
||||
}
|
||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
||||
path += '/';
|
||||
}
|
||||
url.path = path;
|
||||
}
|
||||
/**
|
||||
* Attempts to resolve `input` URL/path relative to `base`.
|
||||
*/
|
||||
function resolve$1(input, base) {
|
||||
if (!input && !base)
|
||||
return '';
|
||||
const url = parseUrl(input);
|
||||
let inputType = url.type;
|
||||
if (base && inputType !== UrlType.Absolute) {
|
||||
const baseUrl = parseUrl(base);
|
||||
const baseType = baseUrl.type;
|
||||
switch (inputType) {
|
||||
case UrlType.Empty:
|
||||
url.hash = baseUrl.hash;
|
||||
// fall through
|
||||
case UrlType.Hash:
|
||||
url.query = baseUrl.query;
|
||||
// fall through
|
||||
case UrlType.Query:
|
||||
case UrlType.RelativePath:
|
||||
mergePaths(url, baseUrl);
|
||||
// fall through
|
||||
case UrlType.AbsolutePath:
|
||||
// The host, user, and port are joined, you can't copy one without the others.
|
||||
url.user = baseUrl.user;
|
||||
url.host = baseUrl.host;
|
||||
url.port = baseUrl.port;
|
||||
// fall through
|
||||
case UrlType.SchemeRelative:
|
||||
// The input doesn't have a schema at least, so we need to copy at least that over.
|
||||
url.scheme = baseUrl.scheme;
|
||||
}
|
||||
if (baseType > inputType)
|
||||
inputType = baseType;
|
||||
}
|
||||
normalizePath(url, inputType);
|
||||
const queryHash = url.query + url.hash;
|
||||
switch (inputType) {
|
||||
// This is impossible, because of the empty checks at the start of the function.
|
||||
// case UrlType.Empty:
|
||||
case UrlType.Hash:
|
||||
case UrlType.Query:
|
||||
return queryHash;
|
||||
case UrlType.RelativePath: {
|
||||
// The first char is always a "/", and we need it to be relative.
|
||||
const path = url.path.slice(1);
|
||||
if (!path)
|
||||
return queryHash || '.';
|
||||
if (isRelative(base || input) && !isRelative(path)) {
|
||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
||||
// relative starts with a "..", though, so check before prepending.
|
||||
return './' + path + queryHash;
|
||||
}
|
||||
return path + queryHash;
|
||||
}
|
||||
case UrlType.AbsolutePath:
|
||||
return url.path + queryHash;
|
||||
default:
|
||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Resolves `input` against `base`, always treating a non-empty `base` as a
 * directory (a trailing "/" is appended when missing).
 * https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
 * https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
 */
function resolve(input, base) {
    let dir = base;
    if (dir && !dir.endsWith('/'))
        dir += '/';
    return resolve$1(input, dir);
}
|
||||
|
||||
/**
 * Removes everything after the last "/", but leaves the slash.
 * Returns '' for a falsy path or a path without any "/".
 */
function stripFilename(path) {
    if (!path)
        return '';
    const lastSlash = path.lastIndexOf('/');
    return path.substring(0, lastSlash + 1);
}
|
||||
|
||||
// Offsets into a decoded source-map segment tuple:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
// Segments of length 1 carry only the generated column; length 4 adds the
// source position; length 5 also names the mapping.
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
|
||||
|
||||
/**
 * Ensures every line of decoded mappings is sorted by generated column.
 * Returns the input array untouched when it is already fully sorted.
 * When sorting is needed and `owned` is false, works on a shallow copy so
 * the consumer's input is never mutated.
 */
function maybeSort(mappings, owned) {
    const first = nextUnsortedSegmentLine(mappings, 0);
    if (first === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to
    // directly mutate it. If not, do not modify the consumer's input array.
    const out = owned ? mappings : mappings.slice();
    let i = first;
    while (i < out.length) {
        out[i] = sortSegments(out[i], owned);
        i = nextUnsortedSegmentLine(out, i + 1);
    }
    return out;
}
|
||||
/**
 * Returns the index of the first unsorted line at or after `start`,
 * or `mappings.length` when every remaining line is sorted.
 */
function nextUnsortedSegmentLine(mappings, start) {
    let i = start;
    while (i < mappings.length && isSorted(mappings[i]))
        i += 1;
    return i;
}
|
||||
/** True when a line's segments are non-decreasing by generated column. */
function isSorted(line) {
    for (let j = 0; j + 1 < line.length; j++) {
        if (line[j + 1][COLUMN] < line[j][COLUMN]) {
            return false;
        }
    }
    return true;
}
|
||||
/**
 * Sorts a line's segments by generated column; copies first unless the
 * caller owns the array.
 */
function sortSegments(line, owned) {
    const target = owned ? line : line.slice();
    return target.sort(sortComparator);
}
|
||||
/** Orders two segments by their generated column. */
function sortComparator(a, b) {
    const diff = a[COLUMN] - b[COLUMN];
    return diff;
}
|
||||
|
||||
// Module-level side channel written by binarySearch()/memoizedBinarySearch()
// and read by traceSegmentInternal(): true when the last search hit an exact
// column match, false when it returned the left-index instead.
let found = false;
|
||||
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 *
 * Also records exact-match status in the module-level `found` flag.
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        // Overflow-safe midpoint.
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    // No exact match: report the left-index (may be -1 when the needle sorts
    // before every element).
    found = false;
    return low - 1;
}
|
||||
/**
 * Walks right from a matched `index`, returning the last index whose
 * generated column still equals `needle`.
 */
function upperBound(haystack, needle, index) {
    let last = index;
    for (let i = last + 1; i < haystack.length; i++) {
        if (haystack[i][COLUMN] !== needle)
            break;
        last = i;
    }
    return last;
}
|
||||
/**
 * Walks left from a matched `index`, returning the first index whose
 * generated column still equals `needle`.
 */
function lowerBound(haystack, needle, index) {
    let first = index;
    for (let i = first - 1; i >= 0; i--) {
        if (haystack[i][COLUMN] !== needle)
            break;
        first = i;
    }
    return first;
}
|
||||
/**
 * Creates a fresh memo record for memoizedBinarySearch(); the -1 sentinels
 * mean "no previous search recorded".
 */
function memoizedState() {
    const state = {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
    return state;
}
|
||||
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 *
 * Also updates the module-level `found` flag, either directly (memo hit) or
 * via binarySearch().
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    // Memo only applies within the same line (`key`).
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Exact repeat: recompute `found` and return the cached index.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
||||
|
||||
// Error messages thrown by originalPositionFor() for out-of-range needles.
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Search bias: LEAST_UPPER_BOUND picks the closest segment at-or-after the
// needle; GREATEST_LOWER_BOUND (the default) the closest at-or-before it.
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
|
||||
/**
 * Parsed, queryable representation of a source map. Accepts a JSON string,
 * a raw source-map object, or an existing TraceMap (returned as-is).
 */
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // Already a TraceMap (has the private memo field): reuse it.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // `x_google_ignoreList` is the legacy spelling of `ignoreList`.
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        // Resolve each source against sourceRoot and the map's own URL.
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // VLQ-encoded: decode lazily in decodedMappings().
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            // Already decoded; we own the array only if we parsed the JSON.
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|
||||
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers. At runtime it is the identity function.
 */
function cast(tracer) {
    return tracer;
}
|
||||
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's
 * mappings field, decoding the VLQ string on first access and caching the
 * result on the map.
 */
function decodedMappings(map) {
    const tracer = cast(map);
    if (!tracer._decoded) {
        tracer._decoded = decode(cast(map)._encoded);
    }
    return tracer._decoded;
}
|
||||
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 *
 * Returns an all-null OMapping when no original position exists; throws on
 * line < 1 or column < 0.
 */
function originalPositionFor(map, needle) {
    let { line, column, bias } = needle;
    // Convert to the 0-based line used internally.
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
    if (index === -1)
        return OMapping(null, null, null, null);
    const segment = segments[index];
    // Length-1 segments map a generated column to no original position.
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    // Convert stored 0-based source line back to the public 1-based form.
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
|
||||
/** Builds the plain result object returned by originalPositionFor(). */
function OMapping(source, line, column, name) {
    const mapping = { source, line, column, name };
    return mapping;
}
|
||||
/**
 * Finds the segment index for `column` within one line, honoring the bias.
 * Reads the module-level `found` flag set by memoizedBinarySearch().
 * Returns -1 when no segment satisfies the bias.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact match: widen to the first/last segment with this column.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        // No match and upper-bound bias: move right of the left-index.
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
|
||||
|
||||
// Only install once if called multiple times (guards Error.prepareStackTrace).
let errorFormatterInstalled = false;
// Maps a file path to a string containing the file contents
const fileContentsCache = {};
// Maps a file path to a { url, map } record for that file (map may be null
// when lookup failed, to cache the negative result).
const sourceMapCache = {};
// Regex for detecting inline (data-URL, base64) source maps
const reSourceMap = /^data:application\/json[^,]+base64,/;
// Priority list of retrieve handlers; earlier entries win (install() unshifts).
let retrieveFileHandlers = [];
let retrieveMapHandlers = [];
|
||||
/**
 * Returns `process.version` when a global `process` object exists (Node),
 * otherwise an empty string (browser-like environments).
 */
function globalProcessVersion() {
    const hasProcess = typeof process === "object" && process !== null;
    return hasProcess ? process.version : "";
}
|
||||
/**
 * Builds a dispatcher that tries each handler in `list` in order and returns
 * the first truthy result, or null when none produce one. `list` is read
 * live on every call, so handlers added later are picked up.
 */
function handlerExec(list) {
    return function(arg) {
        for (const handler of list) {
            const result = handler(arg);
            if (result) return result;
        }
        return null;
    };
}
|
||||
// Dispatcher over retrieveFileHandlers; returns file contents or null.
let retrieveFile = handlerExec(retrieveFileHandlers);
// Default handler: read from disk with a permanent in-memory cache.
retrieveFileHandlers.push((path) => {
    // Trim the path to make sure there is no extra whitespace.
    path = path.trim();
    if (path.startsWith("file:"))
        // existsSync/readFileSync can't handle file protocol, but once stripped, it works
        // NOTE(review): the first callback parameter is the WHOLE match, not the
        // protocol — the name is misleading; `drive` is capture group 1.
        // With a drive ("file:///C:") the prefix is dropped entirely; without
        // one it collapses to a leading "/".
        path = path.replace(/file:\/\/\/(\w:)?/, (protocol, drive) => {
            return drive ? "" : "/";
        });
    if (path in fileContentsCache) return fileContentsCache[path];
    let contents = "";
    try {
        // Best-effort read; missing or unreadable files cache as "".
        if (fs.existsSync(path)) contents = fs.readFileSync(path, "utf8");
    } catch {}
    return fileContentsCache[path] = contents;
});
|
||||
// Support URLs relative to a directory, but be careful about a protocol prefix
function supportRelativeURL(file, url) {
    if (!file) return url;
    const dir = path.dirname(file);
    const protocolMatch = /^\w+:\/\/[^/]*/.exec(dir);
    let prefix = protocolMatch ? protocolMatch[0] : "";
    const rest = dir.slice(prefix.length);
    if (prefix && /^\/\w:/.test(rest)) {
        // handle file:///C:/ paths: keep the drive letter out of path.resolve
        prefix += "/";
        const resolved = path.resolve(dir.slice(prefix.length), url);
        return prefix + resolved.replace(/\\/g, "/");
    }
    return prefix + path.resolve(rest, url);
}
|
||||
/**
 * Extracts the sourceMappingURL from a generated file's contents, or null
 * when the file cannot be retrieved or carries no such comment.
 */
function retrieveSourceMapURL(source) {
    const fileData = retrieveFile(source);
    if (!fileData) return null;
    const re = /\/\/[@#]\s*sourceMappingURL=([^\s'"]+)\s*$|\/\*[@#]\s*sourceMappingURL=[^\s*'"]+\s*\*\/\s*$/gm;
    // Keep executing the search to find the *last* sourceMappingURL to avoid
    // picking up sourceMappingURLs from comments, strings, etc.
    let lastMatch = null;
    for (let match = re.exec(fileData); match !== null; match = re.exec(fileData)) {
        lastMatch = match;
    }
    return lastMatch ? lastMatch[1] : null;
}
|
||||
// Can be overridden by the retrieveSourceMap option to install. Takes a
// generated source filename; returns a {map, optional url} object, or null if
// there is no source map. The map field may be either a string or the parsed
// JSON object (ie, it must be a valid argument to the SourceMapConsumer
// constructor).
let retrieveSourceMap = handlerExec(retrieveMapHandlers);
// Default handler: locate the map via the sourceMappingURL comment, then
// either decode an inline data URL or load the referenced file.
retrieveMapHandlers.push((source) => {
    let sourceMappingURL = retrieveSourceMapURL(source);
    if (!sourceMappingURL) return null;
    // Read the contents of the source map
    let sourceMapData;
    if (reSourceMap.test(sourceMappingURL)) {
        // Support source map URL as a data url
        const rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(",") + 1);
        sourceMapData = Buffer.from(rawData, "base64").toString();
        // Inline maps have no URL of their own; use the source's.
        sourceMappingURL = source;
    } else {
        // Support source map URLs relative to the source URL
        sourceMappingURL = supportRelativeURL(source, sourceMappingURL);
        sourceMapData = retrieveFile(sourceMappingURL);
    }
    if (!sourceMapData) return null;
    return {
        url: sourceMappingURL,
        map: sourceMapData
    };
});
|
||||
// interface Position {
//   source: string
//   line: number
//   column: number
// }
/**
 * Maps a generated position to its original position using (and populating)
 * sourceMapCache. Falls back to the input position when no map or no
 * matching mapping exists.
 */
function mapSourcePosition(position) {
    if (!position.source) return position;
    let sourceMap = sourceMapCache[position.source];
    if (!sourceMap) {
        // Call the (overridable) retrieveSourceMap function to get the source map.
        const urlAndMap = retrieveSourceMap(position.source);
        const map = urlAndMap && urlAndMap.map;
        // Reject maps with an empty `mappings` field — they carry no data.
        if (map && !(typeof map === "object" && "mappings" in map && map.mappings === "")) {
            var _sourceMap$map;
            sourceMap = sourceMapCache[position.source] = {
                url: urlAndMap.url,
                map: new TraceMap(map)
            };
            // Load all sources stored inline with the source map into the file cache
            // to pretend like they are already loaded. They may not exist on disk.
            if ((_sourceMap$map = sourceMap.map) === null || _sourceMap$map === void 0 ? void 0 : _sourceMap$map.sourcesContent) sourceMap.map.sources.forEach((source, i) => {
                var _sourceMap$map2;
                const contents = (_sourceMap$map2 = sourceMap.map) === null || _sourceMap$map2 === void 0 || (_sourceMap$map2 = _sourceMap$map2.sourcesContent) === null || _sourceMap$map2 === void 0 ? void 0 : _sourceMap$map2[i];
                if (contents && source && sourceMap.url) {
                    const url = supportRelativeURL(sourceMap.url, source);
                    fileContentsCache[url] = contents;
                }
            });
        } else sourceMap = sourceMapCache[position.source] = {
            // Cache the negative result so we don't retry on every frame.
            url: null,
            map: null
        };
    }
    // Resolve the source URL relative to the URL of the source map
    if (sourceMap && sourceMap.map && sourceMap.url) {
        const originalPosition = originalPositionFor(sourceMap.map, position);
        // Only return the original position if a matching line was found. If no
        // matching line is found then we return position instead, which will cause
        // the stack trace to print the path and line for the compiled file. It is
        // better to give a precise location in the compiled file than a vague
        // location in the original file.
        if (originalPosition.source !== null) {
            originalPosition.source = supportRelativeURL(sourceMap.url, originalPosition.source);
            return originalPosition;
        }
    }
    return position;
}
|
||||
// Parses code generated by FormatEvalOrigin(), a function inside V8:
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
function mapEvalOrigin(origin) {
    // Most eval() calls are in this format
    const direct = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
    if (direct) {
        const position = mapSourcePosition({
            name: null,
            source: direct[2],
            line: +direct[3],
            column: +direct[4] - 1
        });
        return `eval at ${direct[1]} (${position.source}:${position.line}:${position.column + 1})`;
    }
    // Parse nested eval() calls using recursion
    const nested = /^eval at ([^(]+) \((.+)\)$/.exec(origin);
    if (nested) {
        return `eval at ${nested[1]} (${mapEvalOrigin(nested[2])})`;
    }
    // Make sure we still return useful information if we didn't find anything
    return origin;
}
|
||||
// This is copied almost verbatim from the V8 source code at
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The
// implementation of wrapCallSite() used to just forward to the actual source
// code of CallSite.prototype.toString but unfortunately a new release of V8
// did something to the prototype chain and broke the shim. The only fix I
// could find was copy/paste.
//
// Formats a CallSite-like object (`this`) as a stack-frame line, e.g.
// "Type.method [as alias] (file:1:2)".
function CallSiteToString() {
    let fileName;
    let fileLocation = "";
    if (this.isNative()) fileLocation = "native";
    else {
        fileName = this.getScriptNameOrSourceURL();
        if (!fileName && this.isEval()) {
            fileLocation = this.getEvalOrigin();
            fileLocation += ", ";
        }
        if (fileName) fileLocation += fileName;
        else
            // Source code does not originate from a file and is not native, but we
            // can still get the source position inside the source string, e.g. in
            // an eval string.
            fileLocation += "<anonymous>";
        const lineNumber = this.getLineNumber();
        if (lineNumber != null) {
            fileLocation += `:${lineNumber}`;
            const columnNumber = this.getColumnNumber();
            if (columnNumber) fileLocation += `:${columnNumber}`;
        }
    }
    let line = "";
    const functionName = this.getFunctionName();
    let addSuffix = true;
    const isConstructor = this.isConstructor();
    const isMethodCall = !(this.isToplevel() || isConstructor);
    if (isMethodCall) {
        let typeName = this.getTypeName();
        // Fixes shim to be backward compatible with Node v0 to v4
        if (typeName === "[object Object]") typeName = "null";
        const methodName = this.getMethodName();
        if (functionName) {
            // Prefix the type name unless the function name already starts with it.
            if (typeName && functionName.indexOf(typeName) !== 0) line += `${typeName}.`;
            line += functionName;
            // Append "[as method]" when the property name differs from the
            // function's own name.
            if (methodName && functionName.indexOf(`.${methodName}`) !== functionName.length - methodName.length - 1) line += ` [as ${methodName}]`;
        } else line += `${typeName}.${methodName || "<anonymous>"}`;
    } else if (isConstructor) line += `new ${functionName || "<anonymous>"}`;
    else if (functionName) line += functionName;
    else {
        // Anonymous top-level frame: the location IS the line.
        line += fileLocation;
        addSuffix = false;
    }
    if (addSuffix) line += ` (${fileLocation})`;
    return line;
}
|
||||
/**
 * Shallow-clones a V8 CallSite into a plain object whose is*/get* methods
 * delegate to the original frame, so individual accessors can be overridden
 * (see wrapCallSite) without mutating the real CallSite.
 */
function cloneCallSite(frame) {
    const object = {};
    Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach((name) => {
        const key = name;
        // @ts-expect-error difficult to type
        object[key] = /^(?:is|get)/.test(name) ? function() {
            // eslint-disable-next-line no-useless-call
            return frame[key].call(frame);
        } : frame[key];
    });
    // Use our copy of V8's formatter so the clone stringifies like a frame.
    object.toString = CallSiteToString;
    return object;
}
|
||||
/**
 * Wraps a V8 CallSite so its position accessors report source-mapped
 * locations. `state` threads the previous frame's position through a stack
 * walk (see prepareStackTrace) so function names line up with call sites.
 * Returns the original frame when nothing could be mapped.
 */
function wrapCallSite(frame, state) {
    // provides interface backward compatibility
    if (state === void 0) state = {
        nextPosition: null,
        curPosition: null
    };
    if (frame.isNative()) {
        state.curPosition = null;
        return frame;
    }
    // Most call sites will return the source file from getFileName(), but code
    // passed to eval() ending in "//# sourceURL=..." will return the source file
    // from getScriptNameOrSourceURL() instead
    const source = frame.getFileName() || frame.getScriptNameOrSourceURL();
    if (source) {
        const line = frame.getLineNumber();
        // V8 columns are 1-based; source maps are 0-based.
        let column = frame.getColumnNumber() - 1;
        // Fix position in Node where some (internal) code is prepended.
        // See https://github.com/evanw/node-source-map-support/issues/36
        // Header removed in node at ^10.16 || >=11.11.0
        // v11 is not an LTS candidate, we can just test the one version with it.
        // Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
        const noHeader = /^v(?:10\.1[6-9]|10\.[2-9]\d|10\.\d{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;
        const headerLength = noHeader.test(globalProcessVersion()) ? 0 : 62;
        if (line === 1 && column > headerLength && !frame.isEval()) column -= headerLength;
        const position = mapSourcePosition({
            name: null,
            source,
            line,
            column
        });
        state.curPosition = position;
        frame = cloneCallSite(frame);
        const originalFunctionName = frame.getFunctionName;
        frame.getFunctionName = function() {
            // Use the NEXT (callee) frame's mapped name when available — the
            // original name belongs to the position one frame down the stack.
            if (state.nextPosition == null) return originalFunctionName();
            return state.nextPosition.name || originalFunctionName();
        };
        frame.getFileName = function() {
            return position.source ?? null;
        };
        frame.getLineNumber = function() {
            return position.line;
        };
        frame.getColumnNumber = function() {
            // Convert back to V8's 1-based columns.
            return position.column + 1;
        };
        frame.getScriptNameOrSourceURL = function() {
            return position.source;
        };
        return frame;
    }
    // Code called using eval() needs special handling
    let origin = frame.isEval() && frame.getEvalOrigin();
    if (origin) {
        origin = mapEvalOrigin(origin);
        frame = cloneCallSite(frame);
        frame.getEvalOrigin = function() {
            return origin || void 0;
        };
        return frame;
    }
    // If we get here then we were unable to change the source position
    return frame;
}
|
||||
// This function is part of the V8 stack trace API, for more info see:
// https://v8.dev/docs/stack-trace-api
//
// Formats an error's stack with source-mapped frames. Walks the frames from
// innermost-last so each frame can see its callee's mapped position via
// `state.nextPosition` (consumed by wrapCallSite's getFunctionName).
function prepareStackTrace(error, stack) {
    const name = error.name || "Error";
    const message = error.message || "";
    const errorString = `${name}: ${message}`;
    const state = {
        nextPosition: null,
        curPosition: null
    };
    const processedStack = [];
    for (let i = stack.length - 1; i >= 0; i--) {
        processedStack.push(`\n    at ${wrapCallSite(stack[i], state)}`);
        state.nextPosition = state.curPosition;
    }
    // Reset shared state so it doesn't leak into the next trace.
    state.curPosition = state.nextPosition = null;
    // Frames were built bottom-up; reverse back to top-down order.
    return errorString + processedStack.reverse().join("");
}
|
||||
// NOTE(review): these two calls discard their results and have no effect —
// leftover from upstream source-map-support, which snapshots the original
// handler lists here (e.g. `const originalRetrieveFileHandlers = ...`).
retrieveFileHandlers.slice(0);
retrieveMapHandlers.slice(0);
|
||||
/**
 * Installs the source-map-aware stack-trace formatter.
 * @param {object} [options]
 * @param {Function} [options.retrieveFile] - custom file-content resolver,
 *   tried before the built-in handlers (replaces them when
 *   `overrideRetrieveFile` is set).
 * @param {Function} [options.retrieveSourceMap] - custom source-map resolver,
 *   tried before the built-in handlers (replaces them when
 *   `overrideRetrieveSourceMap` is set).
 * Side effect: assigns Error.prepareStackTrace once, globally.
 */
function install(options) {
    options = options || {};
    // Allow sources to be found by methods other than reading the files
    // directly from disk.
    if (options.retrieveFile) {
        if (options.overrideRetrieveFile) retrieveFileHandlers.length = 0;
        retrieveFileHandlers.unshift(options.retrieveFile);
    }
    // Allow source maps to be found by methods other than reading the files
    // directly from disk.
    if (options.retrieveSourceMap) {
        if (options.overrideRetrieveSourceMap) retrieveMapHandlers.length = 0;
        retrieveMapHandlers.unshift(options.retrieveSourceMap);
    }
    // Install the error reformatter (only once, even across repeat calls).
    if (!errorFormatterInstalled) {
        errorFormatterInstalled = true;
        Error.prepareStackTrace = prepareStackTrace;
    }
}
|
||||
|
||||
// Built in two pieces so this file's own text never contains the literal
// marker and thus never matches sourceMappingURL-detection regexes.
let SOURCEMAPPING_URL = "sourceMa";
SOURCEMAPPING_URL += "ppingURL";
// Marker comment identifying maps inlined by vite-node (used to skip
// re-processing already-instrumented code).
const VITE_NODE_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-node";
const VITE_NODE_SOURCEMAPPING_URL = `${SOURCEMAPPING_URL}=data:application/json;charset=utf-8`;
|
||||
/**
 * Appends the transform result's source map to its code as an inline base64
 * data URL tagged with the vite-node marker. Rewrites server-absolute source
 * paths to be relative to the output file, and strips any other inline maps
 * to reduce payload size. Returns `result` (mutated) unchanged if it has no
 * map or was already processed.
 */
function withInlineSourcemap(result, options) {
    const map = result.map;
    let code = result.code;
    if (!map || code.includes(VITE_NODE_SOURCEMAPPING_SOURCE)) return result;
    if ("sources" in map) {
        var _map$sources;
        map.sources = (_map$sources = map.sources) === null || _map$sources === void 0 ? void 0 : _map$sources.map((source) => {
            if (!source) return source;
            // sometimes files here are absolute,
            // but they are considered absolute to the server url, not the file system
            // this is a bug in Vite
            // all files should be either absolute to the file system or relative to the source map file
            if (pathe.isAbsolute(source)) {
                const actualPath = !source.startsWith(utils.withTrailingSlash(options.root)) && source.startsWith("/") ? pathe.resolve(options.root, source.slice(1)) : source;
                return pathe.relative(pathe.dirname(options.filepath), actualPath);
            }
            return source;
        });
    }
    // to reduce the payload size, we only inline vite node source map, because it's also the only one we use
    const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
    // NOTE(review): mixing .test() and .replace() on one /g regex relies on
    // replace() resetting lastIndex; works because replace removes all matches.
    while (OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
    // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
    // so that debuggers can be set to break on first line
    // Since Vite 6, import statements at the top of the file are preserved correctly,
    // so we don't need to add this mapping anymore.
    if (!options.noFirstLineMapping && map.mappings.startsWith(";")) map.mappings = `AAAA,CAAA${map.mappings}`;
    const sourceMap = Buffer.from(JSON.stringify(map), "utf-8").toString("base64");
    result.code = `${code.trimEnd()}\n\n${VITE_NODE_SOURCEMAPPING_SOURCE}\n//# ${VITE_NODE_SOURCEMAPPING_URL};base64,${sourceMap}\n`;
    return result;
}
|
||||
/**
 * Extracts the LAST vite-node inline source map from `code` and returns it
 * parsed, or null when none is present. Taking the last occurrence keeps
 * user strings that merely look like maps from winning.
 */
function extractSourceMap(code) {
    const regexp = new RegExp(`//# ${VITE_NODE_SOURCEMAPPING_URL};base64,(.+)`, "gm");
    let lastMatch = null;
    for (let match = regexp.exec(code); match !== null; match = regexp.exec(code)) {
        lastMatch = match;
    }
    if (!lastMatch) return null;
    const json = Buffer.from(lastMatch[1], "base64").toString("utf-8");
    return JSON.parse(json);
}
|
||||
/**
 * Installs stack-trace source-map support backed by a user-supplied
 * `options.getSourceMap(source)` lookup.
 */
function installSourcemapsSupport(options) {
    install({
        retrieveSourceMap(source) {
            const map = options.getSourceMap(source);
            return map ? { url: source, map } : null;
        }
    });
}
|
||||
|
||||
exports.extractSourceMap = extractSourceMap;
|
||||
exports.installSourcemapsSupport = installSourcemapsSupport;
|
||||
exports.withInlineSourcemap = withInlineSourcemap;
|
||||
15
GTA_P_V2/node_modules/vite-node/dist/source-map.d.ts
generated
vendored
Normal file
15
GTA_P_V2/node_modules/vite-node/dist/source-map.d.ts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
import { TransformResult } from 'vite';
|
||||
import { E as EncodedSourceMap } from './trace-mapping.d-DLVdEqOp.js';
|
||||
|
||||
interface InstallSourceMapSupportOptions {
|
||||
getSourceMap: (source: string) => EncodedSourceMap | null | undefined;
|
||||
}
|
||||
declare function withInlineSourcemap(result: TransformResult, options: {
|
||||
root: string
|
||||
filepath: string
|
||||
noFirstLineMapping?: boolean
|
||||
}): TransformResult;
|
||||
declare function extractSourceMap(code: string): EncodedSourceMap | null;
|
||||
declare function installSourcemapsSupport(options: InstallSourceMapSupportOptions): void;
|
||||
|
||||
export { extractSourceMap, installSourcemapsSupport, withInlineSourcemap };
|
||||
973
GTA_P_V2/node_modules/vite-node/dist/source-map.mjs
generated
vendored
Normal file
973
GTA_P_V2/node_modules/vite-node/dist/source-map.mjs
generated
vendored
Normal file
@@ -0,0 +1,973 @@
|
||||
import { isAbsolute, resolve as resolve$2, relative, dirname } from 'pathe';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { withTrailingSlash } from './utils.mjs';
|
||||
import 'node:module';
|
||||
import 'node:url';
|
||||
|
||||
// Base64 VLQ alphabet and two lookup tables for fast encode/decode:
// intToChar maps a 6-bit value to its ASCII code, charToInt the inverse.
const comma = ','.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
|
||||
/**
 * Decodes one Base64-VLQ integer from `reader` and adds it to `relative`
 * (source-map fields are delta-encoded). Each char carries 5 data bits; bit
 * 32 is the continuation flag; the LSB of the assembled value is the sign.
 */
function decodeInteger(reader, relative) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = reader.next();
        integer = charToInt[c];
        value |= (integer & 31) << shift;
        shift += 5;
    } while (integer & 32);
    const shouldNegate = value & 1;
    value >>>= 1;
    if (shouldNegate) {
        // -2147483648 | -value keeps -0x80000000 representable (plain negation
        // of the shifted value would overflow 32-bit range).
        value = -2147483648 | -value;
    }
    return relative + value;
}
|
||||
/**
 * True when another VLQ field follows in the current segment, i.e. the
 * cursor is before `max` and not sitting on a segment separator (",").
 */
function hasMoreVlq(reader, max) {
    return reader.pos < max && reader.peek() !== comma;
}
|
||||
/** Minimal forward cursor over a string, reading UTF-16 code units. */
class StringReader {
    constructor(buffer) {
        this.pos = 0;
        this.buffer = buffer;
    }
    /** Consumes and returns the code unit at the cursor. */
    next() {
        const code = this.buffer.charCodeAt(this.pos);
        this.pos += 1;
        return code;
    }
    /** Returns the code unit at the cursor without consuming it. */
    peek() {
        return this.buffer.charCodeAt(this.pos);
    }
    /** Index of `char` at/after the cursor, or buffer length when absent. */
    indexOf(char) {
        const { buffer, pos } = this;
        const idx = buffer.indexOf(char, pos);
        return idx === -1 ? buffer.length : idx;
    }
}
|
||||
|
||||
/**
 * Decodes a full VLQ `mappings` string into an array of lines, each an array
 * of segments ([genCol] | [genCol, srcIdx, srcLine, srcCol] | + namesIdx).
 * All fields except the generated column are deltas carried ACROSS lines;
 * the generated column resets per line (";" separated).
 */
function decode(mappings) {
    const { length } = mappings;
    const reader = new StringReader(mappings);
    const decoded = [];
    // Running delta accumulators.
    let genColumn = 0;
    let sourcesIndex = 0;
    let sourceLine = 0;
    let sourceColumn = 0;
    let namesIndex = 0;
    do {
        const semi = reader.indexOf(';');
        const line = [];
        let sorted = true;
        let lastCol = 0;
        // Generated column restarts at 0 on every line.
        genColumn = 0;
        while (reader.pos < semi) {
            let seg;
            genColumn = decodeInteger(reader, genColumn);
            // Track whether this line needs a post-hoc sort.
            if (genColumn < lastCol)
                sorted = false;
            lastCol = genColumn;
            if (hasMoreVlq(reader, semi)) {
                sourcesIndex = decodeInteger(reader, sourcesIndex);
                sourceLine = decodeInteger(reader, sourceLine);
                sourceColumn = decodeInteger(reader, sourceColumn);
                if (hasMoreVlq(reader, semi)) {
                    namesIndex = decodeInteger(reader, namesIndex);
                    seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
                }
                else {
                    seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
                }
            }
            else {
                seg = [genColumn];
            }
            line.push(seg);
            // Step past the "," separator.
            reader.pos++;
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        reader.pos = semi + 1;
    } while (reader.pos <= length);
    return decoded;
}
|
||||
/** Sorts a line of segments in place by generated column (index 0). */
function sort(line) {
    line.sort(sortComparator$1);
}
/** Orders two segments by generated column. */
function sortComparator$1(a, b) {
    const diff = a[0] - b[0];
    return diff;
}
|
||||
|
||||
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
// Classification of a URL-ish input, ordered from least to most specific so
// values can be compared numerically (see the `baseType > inputType` merge).
var UrlType;
(function (UrlType) {
    UrlType[UrlType["Empty"] = 1] = "Empty";
    UrlType[UrlType["Hash"] = 2] = "Hash";
    UrlType[UrlType["Query"] = 3] = "Query";
    UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
    UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
    UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
    UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
|
||||
// True when the input begins with a URL scheme, eg "https://".
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
|
||||
/**
 * A scheme-relative (protocol-relative) URL begins with "//" and inherits
 * its scheme from the base, eg "//example.com/path".
 */
function isSchemeRelativeUrl(input) {
    return input.slice(0, 2) === '//';
}
|
||||
/**
 * An absolute (host-relative) path begins with "/", eg "/foo/bar".
 * Note callers check isSchemeRelativeUrl first, so "//" never reaches here.
 */
function isAbsolutePath(input) {
    return input[0] === '/';
}
|
||||
/**
 * True when the input uses the "file:" protocol.
 */
function isFileUrl(input) {
    return input.slice(0, 5) === 'file:';
}
|
||||
/**
 * Relative inputs start with "." (relative path), "?" (query-only),
 * or "#" (hash-only).
 */
function isRelative(input) {
    const first = input.charAt(0);
    return first === '.' || first === '?' || first === '#';
}
|
||||
// Splits a fully absolute URL into its parts. Callers guarantee the input
// has a scheme, so `urlRegex` always matches; a missing path defaults to "/".
function parseAbsoluteUrl(input) {
    const match = urlRegex.exec(input);
    return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
|
||||
// Parses "file:" URLs, forcing the path to start with "/" (file URLs may
// legally omit it, eg "file:foo").
function parseFileUrl(input) {
    const match = fileRegex.exec(input);
    const path = match[2];
    return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
|
||||
// Builds the mutable URL record used throughout resolution. The type starts
// as Absolute; parseUrl downgrades it for partial inputs.
function makeUrl(scheme, user, host, port, path, query, hash) {
    return {
        scheme,
        user,
        host,
        port,
        path,
        query,
        hash,
        type: UrlType.Absolute,
    };
}
|
||||
// Parses any input — absolute URL, scheme-relative URL, absolute path,
// file URL, relative path, query, hash, or empty — into a URL record tagged
// with the matching UrlType. Partial inputs are parsed against a dummy
// "http://foo.com" base, whose scheme/host are then erased from the record.
function parseUrl(input) {
    if (isSchemeRelativeUrl(input)) {
        const url = parseAbsoluteUrl('http:' + input);
        url.scheme = '';
        url.type = UrlType.SchemeRelative;
        return url;
    }
    if (isAbsolutePath(input)) {
        const url = parseAbsoluteUrl('http://foo.com' + input);
        url.scheme = '';
        url.host = '';
        url.type = UrlType.AbsolutePath;
        return url;
    }
    if (isFileUrl(input))
        return parseFileUrl(input);
    if (isAbsoluteUrl(input))
        return parseAbsoluteUrl(input);
    // Anything else is relative; classify by its first character.
    const url = parseAbsoluteUrl('http://foo.com/' + input);
    url.scheme = '';
    url.host = '';
    url.type = input
        ? input.startsWith('?')
            ? UrlType.Query
            : input.startsWith('#')
                ? UrlType.Hash
                : UrlType.RelativePath
        : UrlType.Empty;
    return url;
}
|
||||
/**
 * Removes the filename component of a path, keeping the trailing slash.
 * A path ending in "/.." is pure excess parent traversal — there is no
 * filename to strip — so it is returned untouched.
 */
function stripPathFilename(path) {
    if (path.endsWith('/..'))
        return path;
    const lastSlash = path.lastIndexOf('/');
    return path.substring(0, lastSlash + 1);
}
|
||||
// Prepends the base's directory onto a relative url's path, mutating both
// records in place (base is normalized first).
function mergePaths(url, base) {
    normalizePath(base, base.type);
    // If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
    // path).
    if (url.path === '/') {
        url.path = base.path;
    }
    else {
        // Resolution happens relative to the base path's directory, not the file.
        url.path = stripPathFilename(base.path) + url.path;
    }
}
|
||||
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation.
 *
 * Mutates `url.path` in place. `type` decides whether excess ".." segments
 * are kept (relative paths) or dropped (absolute paths/URLs).
 */
function normalizePath(url, type) {
    const rel = type <= UrlType.RelativePath;
    const pieces = url.path.split('/');
    // We need to preserve the first piece always, so that we output a leading slash. The item at
    // pieces[0] is an empty string.
    let pointer = 1;
    // Positive is the number of real directories we've output, used for popping a parent directory.
    // Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
    let positive = 0;
    // We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
    // generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
    // real directory, we won't need to append, unless the other conditions happen again.
    let addTrailingSlash = false;
    for (let i = 1; i < pieces.length; i++) {
        const piece = pieces[i];
        // An empty directory, could be a trailing slash, or just a double "//" in the path.
        if (!piece) {
            addTrailingSlash = true;
            continue;
        }
        // If we encounter a real directory, then we don't need to append anymore.
        addTrailingSlash = false;
        // A current directory, which we can always drop.
        if (piece === '.')
            continue;
        // A parent directory, we need to see if there are any real directories we can pop. Else, we
        // have an excess of parents, and we'll need to keep the "..".
        if (piece === '..') {
            if (positive) {
                addTrailingSlash = true;
                positive--;
                pointer--;
            }
            else if (rel) {
                // If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
                // URL, protocol relative URL, or an absolute path, we don't need to keep excess.
                pieces[pointer++] = piece;
            }
            continue;
        }
        // We've encountered a real directory. Move it to the next insertion pointer, which accounts for
        // any popped or dropped directories.
        pieces[pointer++] = piece;
        positive++;
    }
    // Rejoin the kept pieces; `pointer` marks one past the last surviving piece.
    let path = '';
    for (let i = 1; i < pointer; i++) {
        path += '/' + pieces[i];
    }
    if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
        path += '/';
    }
    url.path = path;
}
|
||||
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 *
 * The switch below is deliberately fall-through: each less-absolute input
 * type inherits that component from the base AND everything the more-absolute
 * cases below it inherit.
 */
function resolve$1(input, base) {
    if (!input && !base)
        return '';
    const url = parseUrl(input);
    let inputType = url.type;
    if (base && inputType !== UrlType.Absolute) {
        const baseUrl = parseUrl(base);
        const baseType = baseUrl.type;
        switch (inputType) {
            case UrlType.Empty:
                url.hash = baseUrl.hash;
            // fall through
            case UrlType.Hash:
                url.query = baseUrl.query;
            // fall through
            case UrlType.Query:
            case UrlType.RelativePath:
                mergePaths(url, baseUrl);
            // fall through
            case UrlType.AbsolutePath:
                // The host, user, and port are joined, you can't copy one without the others.
                url.user = baseUrl.user;
                url.host = baseUrl.host;
                url.port = baseUrl.port;
            // fall through
            case UrlType.SchemeRelative:
                // The input doesn't have a schema at least, so we need to copy at least that over.
                url.scheme = baseUrl.scheme;
        }
        // The result is only as absolute as the more-absolute of the two parts.
        if (baseType > inputType)
            inputType = baseType;
    }
    normalizePath(url, inputType);
    const queryHash = url.query + url.hash;
    switch (inputType) {
        // This is impossible, because of the empty checks at the start of the function.
        // case UrlType.Empty:
        case UrlType.Hash:
        case UrlType.Query:
            return queryHash;
        case UrlType.RelativePath: {
            // The first char is always a "/", and we need it to be relative.
            const path = url.path.slice(1);
            if (!path)
                return queryHash || '.';
            if (isRelative(base || input) && !isRelative(path)) {
                // If base started with a leading ".", or there is no base and input started with a ".",
                // then we need to ensure that the relative path starts with a ".". We don't know if
                // relative starts with a "..", though, so check before prepending.
                return './' + path + queryHash;
            }
            return path + queryHash;
        }
        case UrlType.AbsolutePath:
            return url.path + queryHash;
        default:
            return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
    }
}
|
||||
|
||||
// Resolves `input` against `base`, matching source-map/devtools semantics.
function resolve(input, base) {
    // The base is always treated as a directory, if it's not empty.
    // https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
    // https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
    if (base && !base.endsWith('/'))
        base += '/';
    return resolve$1(input, base);
}
|
||||
|
||||
/**
 * Removes everything after the last "/", but leaves the slash.
 * Falsy input (including undefined) yields ''.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const lastSlash = path.lastIndexOf('/');
    return path.substring(0, lastSlash + 1);
}
|
||||
|
||||
// Offsets into a decoded source-map segment array:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// (segments may have length 1, 4, or 5).
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
||||
|
||||
// Ensures every line's segments are sorted by generated column, scanning
// lazily and only copying when we do not own the input arrays.
function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
|
||||
// Index of the first line at or after `start` whose segments are unsorted;
// returns mappings.length when everything from `start` on is sorted.
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
|
||||
// Reports whether a line's segments are ordered by generated column.
// Any adjacent pair that decreases means the line is unsorted.
function isSorted(line) {
    for (let j = line.length - 1; j > 0; j--) {
        if (line[j][COLUMN] < line[j - 1][COLUMN])
            return false;
    }
    return true;
}
|
||||
// Sorts a line's segments by generated column, copying first unless the
// caller owns the array (mutation is then allowed).
function sortSegments(line, owned) {
    const target = owned ? line : line.slice();
    return target.sort(sortComparator);
}
|
||||
// Ascending Array#sort comparator on the generated-column slot.
function sortComparator(a, b) {
    const delta = a[COLUMN] - b[COLUMN];
    return delta;
}
|
||||
|
||||
// Out-of-band flag set by every binarySearch call: true iff the needle was
// matched exactly. Callers must read it immediately after the call returns,
// before any other search runs.
let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        // Overflow-safe midpoint.
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    // May be -1 (before the first element) when the needle precedes everything.
    return low - 1;
}
|
||||
/**
 * Scans forward from `index` to the last segment whose generated column
 * still equals `needle` (segments are sorted, so duplicates are adjacent).
 */
function upperBound(haystack, needle, index) {
    let i = index;
    while (i + 1 < haystack.length && haystack[i + 1][COLUMN] === needle)
        i++;
    return i;
}
|
||||
/**
 * Scans backward from `index` to the first segment whose generated column
 * still equals `needle` (segments are sorted, so duplicates are adjacent).
 */
function lowerBound(haystack, needle, index) {
    let i = index;
    while (i - 1 >= 0 && haystack[i - 1][COLUMN] === needle)
        i--;
    return i;
}
|
||||
/**
 * Creates a fresh memo record for memoizedBinarySearch; -1 in every field
 * marks "no previous search".
 */
function memoizedState() {
    const state = {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
    return state;
}
|
||||
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 *
 * Also updates the module-level `found` flag, either directly (cache hit)
 * or via the delegated binarySearch call.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    // Memo is only valid when we're searching the same line (key) as last time.
    if (key === lastKey) {
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
||||
|
||||
// Error messages and bias constants mirroring the `source-map` package API.
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
|
||||
// Lazily-decoding source map wrapper. Accepts a JSON string, a raw map
// object, or an existing TraceMap instance (detected via _decodedMemo and
// returned as-is).
class TraceMap {
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // Already a TraceMap; reuse rather than re-parse.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // `x_google_ignoreList` is the legacy spelling of `ignoreList`.
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        // Resolve each source against sourceRoot and the map's own URL directory.
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        // Encoded (VLQ string) mappings are kept as-is and decoded on demand;
        // pre-decoded arrays are normalized/sorted up front.
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|
||||
/**
 * Typescript doesn't allow friend access to private fields, so this just casts the map into a type
 * with public access modifiers. Identity function at runtime.
 */
function cast(map) {
    return map;
}
|
||||
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 * Decodes the VLQ string lazily on first access and caches the result on the
 * map. (`decode` is defined earlier in this bundle, outside this section.)
 */
function decodedMappings(map) {
    var _a;
    return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
}
|
||||
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 *
 * Returns an all-null OMapping when no mapping covers the position.
 * Throws on line < 1 or column < 0.
 */
function originalPositionFor(map, needle) {
    let { line, column, bias } = needle;
    // Convert the public 1-based line to the 0-based decoded index.
    line--;
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // It's common for parent source maps to have pointers to lines that have no
    // mapping (like a "//# sourceMappingURL=") at the end of the child file.
    if (line >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[line];
    const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
    if (index === -1)
        return OMapping(null, null, null, null);
    const segment = segments[index];
    // Length-1 segments map a generated column with no original position.
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
|
||||
// Plain result record in the shape the `source-map` library returns.
function OMapping(source, line, column, name) {
    return {
        source: source,
        line: line,
        column: column,
        name: name,
    };
}
|
||||
// Finds the segment index for `column` on `line`, honoring the search bias.
// Reads the module-level `found` flag set by memoizedBinarySearch; returns
// -1 when the biased search falls off either end of the segments.
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact hit: walk to the outermost duplicate in the biased direction.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
|
||||
|
||||
// Only install once if called multiple times
let errorFormatterInstalled = false;
// Maps a file path to a string containing the file contents
const fileContentsCache = {};
// Maps a file path to a source map for that file
const sourceMapCache = {};
// Regex for detecting source maps supplied inline as base64 data: URIs
const reSourceMap = /^data:application\/json[^,]+base64,/;
// Priority list of retrieve handlers
let retrieveFileHandlers = [];
let retrieveMapHandlers = [];
|
||||
/**
 * Returns the running Node.js version string, or "" when no usable global
 * `process` object exists (e.g. in browsers).
 */
function globalProcessVersion() {
    const proc = typeof process === "object" && process !== null ? process : null;
    return proc ? proc.version : "";
}
|
||||
/**
 * Builds a dispatcher over a priority list of handlers: each handler is
 * tried in order and the first truthy result wins; null when none match.
 */
function handlerExec(list) {
    return function(arg) {
        for (const handler of list) {
            const result = handler(arg);
            if (result) return result;
        }
        return null;
    };
}
|
||||
// Dispatcher over retrieveFileHandlers; the default disk-reading handler is
// registered below. Results (including misses, cached as "") are memoized in
// fileContentsCache.
let retrieveFile = handlerExec(retrieveFileHandlers);
retrieveFileHandlers.push((path) => {
    // Trim the path to make sure there is no extra whitespace.
    path = path.trim();
    if (path.startsWith("file:"))
        // existsSync/readFileSync can't handle file protocol, but once stripped, it works
        // NOTE(review): the replacer's first parameter receives the full match,
        // not just the protocol, despite its name; a Windows drive capture drops
        // the whole prefix, otherwise it becomes "/".
        path = path.replace(/file:\/\/\/(\w:)?/, (protocol, drive) => {
            return drive ? "" : "/";
        });
    if (path in fileContentsCache) return fileContentsCache[path];
    let contents = "";
    try {
        // `fs` is bound elsewhere in the bundle — not visible in this section.
        if (fs.existsSync(path)) contents = fs.readFileSync(path, "utf8");
    } catch {}
    return fileContentsCache[path] = contents;
});
|
||||
// Support URLs relative to a directory, but be careful about a protocol prefix
// (`path` here is presumably Node's path module, imported elsewhere in the bundle).
function supportRelativeURL(file, url) {
    if (!file) return url;
    const dir = path.dirname(file);
    // Peel off a "scheme://host" prefix so path.resolve only sees the path part.
    const match = /^\w+:\/\/[^/]*/.exec(dir);
    let protocol = match ? match[0] : "";
    const startPath = dir.slice(protocol.length);
    if (protocol && /^\/\w:/.test(startPath)) {
        // handle file:///C:/ paths
        protocol += "/";
        return protocol + path.resolve(dir.slice(protocol.length), url).replace(/\\/g, "/");
    }
    return protocol + path.resolve(dir.slice(protocol.length), url);
}
|
||||
// Extracts the sourceMappingURL comment value from a source file's contents,
// or null when the file can't be read or has no such comment.
function retrieveSourceMapURL(source) {
    // Get the URL of the source map
    const fileData = retrieveFile(source);
    if (!fileData) return null;
    const re = /\/\/[@#]\s*sourceMappingURL=([^\s'"]+)\s*$|\/\*[@#]\s*sourceMappingURL=[^\s*'"]+\s*\*\/\s*$/gm;
    // Keep executing the search to find the *last* sourceMappingURL to avoid
    // picking up sourceMappingURLs from comments, strings, etc.
    let lastMatch, match;
    // eslint-disable-next-line no-cond-assign
    while (match = re.exec(fileData)) lastMatch = match;
    if (!lastMatch) return null;
    return lastMatch[1];
}
|
||||
// Can be overridden by the retrieveSourceMap option to install. Takes a
// generated source filename; returns a {map, optional url} object, or null if
// there is no source map. The map field may be either a string or the parsed
// JSON object (ie, it must be a valid argument to the SourceMapConsumer
// constructor).
let retrieveSourceMap = handlerExec(retrieveMapHandlers);
retrieveMapHandlers.push((source) => {
    let sourceMappingURL = retrieveSourceMapURL(source);
    if (!sourceMappingURL) return null;
    // Read the contents of the source map
    let sourceMapData;
    if (reSourceMap.test(sourceMappingURL)) {
        // Support source map URL as a data url
        const rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(",") + 1);
        sourceMapData = Buffer.from(rawData, "base64").toString();
        // Inline maps have no separate file; reuse the source's own URL.
        sourceMappingURL = source;
    } else {
        // Support source map URLs relative to the source URL
        sourceMappingURL = supportRelativeURL(source, sourceMappingURL);
        sourceMapData = retrieveFile(sourceMappingURL);
    }
    if (!sourceMapData) return null;
    return {
        url: sourceMappingURL,
        map: sourceMapData
    };
});
|
||||
// interface Position {
//   source: string
//   line: number
//   column: number
// }
// Maps a generated {source, line, column} to its original position, caching
// (even negative) lookups in sourceMapCache. Falls back to the input
// position when no map or no matching mapping exists.
function mapSourcePosition(position) {
    if (!position.source) return position;
    let sourceMap = sourceMapCache[position.source];
    if (!sourceMap) {
        // Call the (overridable) retrieveSourceMap function to get the source map.
        const urlAndMap = retrieveSourceMap(position.source);
        const map = urlAndMap && urlAndMap.map;
        // Reject maps with an empty `mappings` field — nothing to trace.
        if (map && !(typeof map === "object" && "mappings" in map && map.mappings === "")) {
            var _sourceMap$map;
            sourceMap = sourceMapCache[position.source] = {
                url: urlAndMap.url,
                map: new TraceMap(map)
            };
            // Load all sources stored inline with the source map into the file cache
            // to pretend like they are already loaded. They may not exist on disk.
            if ((_sourceMap$map = sourceMap.map) === null || _sourceMap$map === void 0 ? void 0 : _sourceMap$map.sourcesContent) sourceMap.map.sources.forEach((source, i) => {
                var _sourceMap$map2;
                const contents = (_sourceMap$map2 = sourceMap.map) === null || _sourceMap$map2 === void 0 || (_sourceMap$map2 = _sourceMap$map2.sourcesContent) === null || _sourceMap$map2 === void 0 ? void 0 : _sourceMap$map2[i];
                if (contents && source && sourceMap.url) {
                    const url = supportRelativeURL(sourceMap.url, source);
                    fileContentsCache[url] = contents;
                }
            });
        } else sourceMap = sourceMapCache[position.source] = {
            // Negative cache entry: don't re-fetch a missing map next time.
            url: null,
            map: null
        };
    }
    // Resolve the source URL relative to the URL of the source map
    if (sourceMap && sourceMap.map && sourceMap.url) {
        const originalPosition = originalPositionFor(sourceMap.map, position);
        // Only return the original position if a matching line was found. If no
        // matching line is found then we return position instead, which will cause
        // the stack trace to print the path and line for the compiled file. It is
        // better to give a precise location in the compiled file than a vague
        // location in the original file.
        if (originalPosition.source !== null) {
            originalPosition.source = supportRelativeURL(sourceMap.url, originalPosition.source);
            return originalPosition;
        }
    }
    return position;
}
|
||||
// Parses code generated by FormatEvalOrigin(), a function inside V8:
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
// Rewrites the file:line:column inside an "eval at ..." origin string using
// the source map; recurses for nested eval origins.
function mapEvalOrigin(origin) {
    // Most eval() calls are in this format
    let match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
    if (match) {
        // Column is converted to 0-based for mapping, then back to 1-based for display.
        const position = mapSourcePosition({
            name: null,
            source: match[2],
            line: +match[3],
            column: +match[4] - 1
        });
        return `eval at ${match[1]} (${position.source}:${position.line}:${position.column + 1})`;
    }
    // Parse nested eval() calls using recursion
    match = /^eval at ([^(]+) \((.+)\)$/.exec(origin);
    if (match) return `eval at ${match[1]} (${mapEvalOrigin(match[2])})`;
    // Make sure we still return useful information if we didn't find anything
    return origin;
}
|
||||
// This is copied almost verbatim from the V8 source code at
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The
// implementation of wrapCallSite() used to just forward to the actual source
// code of CallSite.prototype.toString but unfortunately a new release of V8
// did something to the prototype chain and broke the shim. The only fix I
// could find was copy/paste.
//
// Invoked with `this` bound to a (cloned) CallSite; formats one stack frame
// as V8 would, e.g. "Type.method [as alias] (file:line:col)".
function CallSiteToString() {
    let fileName;
    let fileLocation = "";
    if (this.isNative()) fileLocation = "native";
    else {
        fileName = this.getScriptNameOrSourceURL();
        if (!fileName && this.isEval()) {
            fileLocation = this.getEvalOrigin();
            fileLocation += ", ";
        }
        if (fileName) fileLocation += fileName;
        else
            // Source code does not originate from a file and is not native, but we
            // can still get the source position inside the source string, e.g. in
            // an eval string.
            fileLocation += "<anonymous>";
        const lineNumber = this.getLineNumber();
        if (lineNumber != null) {
            fileLocation += `:${lineNumber}`;
            const columnNumber = this.getColumnNumber();
            if (columnNumber) fileLocation += `:${columnNumber}`;
        }
    }
    let line = "";
    const functionName = this.getFunctionName();
    let addSuffix = true;
    const isConstructor = this.isConstructor();
    const isMethodCall = !(this.isToplevel() || isConstructor);
    if (isMethodCall) {
        let typeName = this.getTypeName();
        // Fixes shim to be backward compatible with Node v0 to v4
        if (typeName === "[object Object]") typeName = "null";
        const methodName = this.getMethodName();
        if (functionName) {
            // Only prefix the type when the function name doesn't already carry it.
            if (typeName && functionName.indexOf(typeName) !== 0) line += `${typeName}.`;
            line += functionName;
            // Append "[as alias]" when the method name differs from the function name.
            if (methodName && functionName.indexOf(`.${methodName}`) !== functionName.length - methodName.length - 1) line += ` [as ${methodName}]`;
        } else line += `${typeName}.${methodName || "<anonymous>"}`;
    } else if (isConstructor) line += `new ${functionName || "<anonymous>"}`;
    else if (functionName) line += functionName;
    else {
        // Anonymous top-level frame: the location IS the line, no parentheses.
        line += fileLocation;
        addSuffix = false;
    }
    if (addSuffix) line += ` (${fileLocation})`;
    return line;
}
|
||||
// Copies a V8 CallSite into a plain mutable object: is*/get* accessors are
// wrapped so they still read from the original frame, everything else is
// copied by value, and toString is replaced with the V8-format shim above.
function cloneCallSite(frame) {
    const object = {};
    Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach((name) => {
        const key = name;
        // @ts-expect-error difficult to type
        object[key] = /^(?:is|get)/.test(name) ? function() {
            // eslint-disable-next-line no-useless-call
            return frame[key].call(frame);
        } : frame[key];
    });
    object.toString = CallSiteToString;
    return object;
}
|
||||
// Rewrites one stack frame through the source map (if any), returning either
// the original frame (native / unmappable) or a clone whose position getters
// report the original source location. `state` threads name information
// between adjacent frames in prepareStackTrace.
function wrapCallSite(frame, state) {
    // provides interface backward compatibility
    if (state === void 0) state = {
        nextPosition: null,
        curPosition: null
    };
    if (frame.isNative()) {
        state.curPosition = null;
        return frame;
    }
    // Most call sites will return the source file from getFileName(), but code
    // passed to eval() ending in "//# sourceURL=..." will return the source file
    // from getScriptNameOrSourceURL() instead
    const source = frame.getFileName() || frame.getScriptNameOrSourceURL();
    if (source) {
        const line = frame.getLineNumber();
        // V8 columns are 1-based; the source-map side is 0-based.
        let column = frame.getColumnNumber() - 1;
        // Fix position in Node where some (internal) code is prepended.
        // See https://github.com/evanw/node-source-map-support/issues/36
        // Header removed in node at ^10.16 || >=11.11.0
        // v11 is not an LTS candidate, we can just test the one version with it.
        // Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
        const noHeader = /^v(?:10\.1[6-9]|10\.[2-9]\d|10\.\d{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;
        const headerLength = noHeader.test(globalProcessVersion()) ? 0 : 62;
        if (line === 1 && column > headerLength && !frame.isEval()) column -= headerLength;
        const position = mapSourcePosition({
            name: null,
            source,
            line,
            column
        });
        state.curPosition = position;
        frame = cloneCallSite(frame);
        const originalFunctionName = frame.getFunctionName;
        // The mapped name belongs to the *caller's* frame, hence nextPosition.
        frame.getFunctionName = function() {
            if (state.nextPosition == null) return originalFunctionName();
            return state.nextPosition.name || originalFunctionName();
        };
        frame.getFileName = function() {
            return position.source ?? null;
        };
        frame.getLineNumber = function() {
            return position.line;
        };
        frame.getColumnNumber = function() {
            // Convert back to V8's 1-based columns.
            return position.column + 1;
        };
        frame.getScriptNameOrSourceURL = function() {
            return position.source;
        };
        return frame;
    }
    // Code called using eval() needs special handling
    let origin = frame.isEval() && frame.getEvalOrigin();
    if (origin) {
        origin = mapEvalOrigin(origin);
        frame = cloneCallSite(frame);
        frame.getEvalOrigin = function() {
            return origin || void 0;
        };
        return frame;
    }
    // If we get here then we were unable to change the source position
    return frame;
}
|
||||
// This function is part of the V8 stack trace API, for more info see:
// https://v8.dev/docs/stack-trace-api
// Formats an Error's structured stack into the conventional text form, with
// every frame remapped through wrapCallSite. Frames are processed deepest
// first so that each frame can borrow its callee's mapped name.
function prepareStackTrace(error, stack) {
    const name = error.name || "Error";
    const message = error.message || "";
    const errorString = `${name}: ${message}`;
    const state = {
        nextPosition: null,
        curPosition: null
    };
    const processedStack = [];
    for (let i = stack.length - 1; i >= 0; i--) {
        processedStack.push(`\n    at ${wrapCallSite(stack[i], state)}`);
        state.nextPosition = state.curPosition;
    }
    state.curPosition = state.nextPosition = null;
    return errorString + processedStack.reverse().join("");
}
|
||||
// NOTE(review): these are no-ops — the copies are discarded. Presumably
// remnants of upstream source-map-support, which snapshots the default
// handlers here (`var originalRetrieve... = handlers.slice(0)`); confirm
// before removing.
retrieveFileHandlers.slice(0);
retrieveMapHandlers.slice(0);
|
||||
// Installs the stack-trace rewriter: registers optional custom file/map
// retrievers (unshifted so they take priority) and hooks
// Error.prepareStackTrace exactly once per process.
function install(options) {
    options = options || {};
    // Allow sources to be found by methods other than reading the files
    // directly from disk.
    if (options.retrieveFile) {
        if (options.overrideRetrieveFile) retrieveFileHandlers.length = 0;
        retrieveFileHandlers.unshift(options.retrieveFile);
    }
    // Allow source maps to be found by methods other than reading the files
    // directly from disk.
    if (options.retrieveSourceMap) {
        if (options.overrideRetrieveSourceMap) retrieveMapHandlers.length = 0;
        retrieveMapHandlers.unshift(options.retrieveSourceMap);
    }
    // Install the error reformatter
    if (!errorFormatterInstalled) {
        errorFormatterInstalled = true;
        Error.prepareStackTrace = prepareStackTrace;
    }
}
|
||||
|
||||
let SOURCEMAPPING_URL = "sourceMa";
|
||||
SOURCEMAPPING_URL += "ppingURL";
|
||||
const VITE_NODE_SOURCEMAPPING_SOURCE = "//# sourceMappingSource=vite-node";
|
||||
const VITE_NODE_SOURCEMAPPING_URL = `${SOURCEMAPPING_URL}=data:application/json;charset=utf-8`;
|
||||
// Rewrites a transform result so its source map is inlined as a base64 data
// URI, tagged with the vite-node marker. Idempotent: returns early when the
// marker is already present. (isAbsolute/withTrailingSlash/resolve$2/relative/
// dirname are path helpers bound elsewhere in the bundle.)
function withInlineSourcemap(result, options) {
    const map = result.map;
    let code = result.code;
    if (!map || code.includes(VITE_NODE_SOURCEMAPPING_SOURCE)) return result;
    if ("sources" in map) {
        var _map$sources;
        map.sources = (_map$sources = map.sources) === null || _map$sources === void 0 ? void 0 : _map$sources.map((source) => {
            if (!source) return source;
            // sometimes files here are absolute,
            // but they are considered absolute to the server url, not the file system
            // this is a bug in Vite
            // all files should be either absolute to the file system or relative to the source map file
            if (isAbsolute(source)) {
                const actualPath = !source.startsWith(withTrailingSlash(options.root)) && source.startsWith("/") ? resolve$2(options.root, source.slice(1)) : source;
                return relative(dirname(options.filepath), actualPath);
            }
            return source;
        });
    }
    // to reduce the payload size, we only inline vite node source map, because it's also the only one we use
    const OTHER_SOURCE_MAP_REGEXP = new RegExp(`//# ${SOURCEMAPPING_URL}=data:application/json[^,]+base64,([A-Za-z0-9+/=]+)$`, "gm");
    // NOTE: a single global replace removes all occurrences; the loop re-checks
    // in case the removal exposes new end-of-line matches.
    while (OTHER_SOURCE_MAP_REGEXP.test(code)) code = code.replace(OTHER_SOURCE_MAP_REGEXP, "");
    // If the first line is not present on source maps, add simple 1:1 mapping ([0,0,0,0], [1,0,0,0])
    // so that debuggers can be set to break on first line
    // Since Vite 6, import statements at the top of the file are preserved correctly,
    // so we don't need to add this mapping anymore.
    if (!options.noFirstLineMapping && map.mappings.startsWith(";")) map.mappings = `AAAA,CAAA${map.mappings}`;
    const sourceMap = Buffer.from(JSON.stringify(map), "utf-8").toString("base64");
    result.code = `${code.trimEnd()}\n\n${VITE_NODE_SOURCEMAPPING_SOURCE}\n//# ${VITE_NODE_SOURCEMAPPING_URL};base64,${sourceMap}\n`;
    return result;
}
|
||||
function extractSourceMap(code) {
|
||||
const regexp = new RegExp(`//# ${VITE_NODE_SOURCEMAPPING_URL};base64,(.+)`, "gm");
|
||||
let lastMatch, match;
|
||||
// eslint-disable-next-line no-cond-assign
|
||||
while (match = regexp.exec(code)) lastMatch = match;
|
||||
// pick only the last source map keeping user strings that look like maps
|
||||
if (lastMatch) return JSON.parse(Buffer.from(lastMatch[1], "base64").toString("utf-8"));
|
||||
return null;
|
||||
}
|
||||
function installSourcemapsSupport(options) {
|
||||
install({ retrieveSourceMap(source) {
|
||||
const map = options.getSourceMap(source);
|
||||
if (map) return {
|
||||
url: source,
|
||||
map
|
||||
};
|
||||
return null;
|
||||
} });
|
||||
}
|
||||
|
||||
export { extractSourceMap, installSourcemapsSupport, withInlineSourcemap };
|
||||
57
GTA_P_V2/node_modules/vite-node/dist/trace-mapping.d-DLVdEqOp.d.ts
generated
vendored
Normal file
57
GTA_P_V2/node_modules/vite-node/dist/trace-mapping.d-DLVdEqOp.d.ts
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
type GeneratedColumn = number;
|
||||
type SourcesIndex = number;
|
||||
type SourceLine = number;
|
||||
type SourceColumn = number;
|
||||
type NamesIndex = number;
|
||||
type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
||||
|
||||
interface SourceMapV3 {
|
||||
file?: string | null;
|
||||
names: string[];
|
||||
sourceRoot?: string;
|
||||
sources: (string | null)[];
|
||||
sourcesContent?: (string | null)[];
|
||||
version: 3;
|
||||
ignoreList?: number[];
|
||||
}
|
||||
interface EncodedSourceMap extends SourceMapV3 {
|
||||
mappings: string;
|
||||
}
|
||||
interface DecodedSourceMap extends SourceMapV3 {
|
||||
mappings: SourceMapSegment[][];
|
||||
}
|
||||
type XInput = {
|
||||
x_google_ignoreList?: SourceMapV3['ignoreList'];
|
||||
};
|
||||
type EncodedSourceMapXInput = EncodedSourceMap & XInput;
|
||||
type DecodedSourceMapXInput = DecodedSourceMap & XInput;
|
||||
type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
|
||||
declare abstract class SourceMap {
|
||||
version: SourceMapV3['version'];
|
||||
file: SourceMapV3['file'];
|
||||
names: SourceMapV3['names'];
|
||||
sourceRoot: SourceMapV3['sourceRoot'];
|
||||
sources: SourceMapV3['sources'];
|
||||
sourcesContent: SourceMapV3['sourcesContent'];
|
||||
resolvedSources: SourceMapV3['sources'];
|
||||
ignoreList: SourceMapV3['ignoreList'];
|
||||
}
|
||||
|
||||
declare class TraceMap implements SourceMap {
|
||||
version: SourceMapV3['version'];
|
||||
file: SourceMapV3['file'];
|
||||
names: SourceMapV3['names'];
|
||||
sourceRoot: SourceMapV3['sourceRoot'];
|
||||
sources: SourceMapV3['sources'];
|
||||
sourcesContent: SourceMapV3['sourcesContent'];
|
||||
ignoreList: SourceMapV3['ignoreList'];
|
||||
resolvedSources: string[];
|
||||
private _encoded;
|
||||
private _decoded;
|
||||
private _decodedMemo;
|
||||
private _bySources;
|
||||
private _bySourceMemos;
|
||||
constructor(map: SourceMapInput, mapUrl?: string | null);
|
||||
}
|
||||
|
||||
export type { DecodedSourceMap as D, EncodedSourceMap as E, SourceMapInput as S };
|
||||
2
GTA_P_V2/node_modules/vite-node/dist/types.cjs
generated
vendored
Normal file
2
GTA_P_V2/node_modules/vite-node/dist/types.cjs
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
'use strict';
|
||||
|
||||
2
GTA_P_V2/node_modules/vite-node/dist/types.d.ts
generated
vendored
Normal file
2
GTA_P_V2/node_modules/vite-node/dist/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export { D as DecodedSourceMap, E as EncodedSourceMap, S as SourceMapInput } from './trace-mapping.d-DLVdEqOp.js';
|
||||
export { A as Arrayable, h as Awaitable, k as CreateHotContextFunction, D as DebuggerOptions, c as DepsHandlingOptions, i as FetchFunction, F as FetchResult, b as HotContext, l as ModuleCache, M as ModuleCacheMap, f as ModuleExecutionInfo, N as Nullable, R as RawSourceMap, j as ResolveIdFunction, S as StartOfSourceMap, d as ViteNodeResolveId, n as ViteNodeResolveModule, m as ViteNodeRunnerOptions, V as ViteNodeServerOptions } from './index.d-DGmxD2U7.js';
|
||||
1
GTA_P_V2/node_modules/vite-node/dist/types.mjs
generated
vendored
Normal file
1
GTA_P_V2/node_modules/vite-node/dist/types.mjs
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
216
GTA_P_V2/node_modules/vite-node/dist/utils.cjs
generated
vendored
Normal file
216
GTA_P_V2/node_modules/vite-node/dist/utils.cjs
generated
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
'use strict';
|
||||
|
||||
var fs = require('node:fs');
|
||||
var node_module = require('node:module');
|
||||
var node_url = require('node:url');
|
||||
var pathe = require('pathe');
|
||||
|
||||
const isWindows = process.platform === "win32";
|
||||
const drive = isWindows ? process.cwd()[0] : null;
|
||||
const driveOpposite = drive ? drive === drive.toUpperCase() ? drive.toLowerCase() : drive.toUpperCase() : null;
|
||||
const driveRegexp = drive ? new RegExp(`(?:^|/@fs/)${drive}(\:[\\/])`) : null;
|
||||
const driveOppositeRegext = driveOpposite ? new RegExp(`(?:^|/@fs/)${driveOpposite}(\:[\\/])`) : null;
|
||||
function slash(str) {
|
||||
return str.replace(/\\/g, "/");
|
||||
}
|
||||
const bareImportRE = /^(?![a-z]:)[\w@](?!.*:\/\/)/i;
|
||||
function isBareImport(id) {
|
||||
return bareImportRE.test(id);
|
||||
}
|
||||
const VALID_ID_PREFIX = "/@id/";
|
||||
function normalizeRequestId(id, base) {
|
||||
if (base && id.startsWith(withTrailingSlash(base))) id = `/${id.slice(base.length)}`;
|
||||
// keep drive the same as in process cwd. ideally, this should be resolved on Vite side
|
||||
// Vite always resolves drive letters to the upper case because of the use of `realpathSync`
|
||||
// https://github.com/vitejs/vite/blob/0ab20a3ee26eacf302415b3087732497d0a2f358/packages/vite/src/node/utils.ts#L635
|
||||
if (driveRegexp && !(driveRegexp === null || driveRegexp === void 0 ? void 0 : driveRegexp.test(id)) && (driveOppositeRegext === null || driveOppositeRegext === void 0 ? void 0 : driveOppositeRegext.test(id))) id = id.replace(driveOppositeRegext, `${drive}$1`);
|
||||
if (id.startsWith("file://")) {
|
||||
// preserve hash/query
|
||||
const { file, postfix } = splitFileAndPostfix(id);
|
||||
return node_url.fileURLToPath(file) + postfix;
|
||||
}
|
||||
return id.replace(/^\/@id\/__x00__/, "\0").replace(/^\/@id\//, "").replace(/^__vite-browser-external:/, "").replace(/\?v=\w+/, "?").replace(/&v=\w+/, "").replace(/\?t=\w+/, "?").replace(/&t=\w+/, "").replace(/\?import/, "?").replace(/&import/, "").replace(/\?&/, "?").replace(/\?+$/, "");
|
||||
}
|
||||
const postfixRE = /[?#].*$/;
|
||||
function cleanUrl(url) {
|
||||
return url.replace(postfixRE, "");
|
||||
}
|
||||
function splitFileAndPostfix(path) {
|
||||
const file = cleanUrl(path);
|
||||
return {
|
||||
file,
|
||||
postfix: path.slice(file.length)
|
||||
};
|
||||
}
|
||||
const internalRequests = ["@vite/client", "@vite/env"];
|
||||
const internalRequestRegexp = new RegExp(`^/?(?:${internalRequests.join("|")})$`);
|
||||
function isInternalRequest(id) {
|
||||
return internalRequestRegexp.test(id);
|
||||
}
|
||||
// https://nodejs.org/api/modules.html#built-in-modules-with-mandatory-node-prefix
|
||||
const prefixedBuiltins = new Set([
|
||||
"node:sea",
|
||||
"node:sqlite",
|
||||
"node:test",
|
||||
"node:test/reporters"
|
||||
]);
|
||||
const builtins = new Set([
|
||||
...node_module.builtinModules,
|
||||
"assert/strict",
|
||||
"diagnostics_channel",
|
||||
"dns/promises",
|
||||
"fs/promises",
|
||||
"path/posix",
|
||||
"path/win32",
|
||||
"readline/promises",
|
||||
"stream/consumers",
|
||||
"stream/promises",
|
||||
"stream/web",
|
||||
"timers/promises",
|
||||
"util/types",
|
||||
"wasi"
|
||||
]);
|
||||
function normalizeModuleId(id) {
|
||||
// unique id that is not available as "test"
|
||||
if (prefixedBuiltins.has(id)) return id;
|
||||
if (id.startsWith("file://")) return node_url.fileURLToPath(id);
|
||||
return id.replace(/\\/g, "/").replace(/^\/@fs\//, isWindows ? "" : "/").replace(/^node:/, "").replace(/^\/+/, "/");
|
||||
}
|
||||
function isPrimitive(v) {
|
||||
return v !== Object(v);
|
||||
}
|
||||
function toFilePath(id, root) {
|
||||
let { absolute, exists } = (() => {
|
||||
if (id.startsWith("/@fs/")) return {
|
||||
absolute: id.slice(4),
|
||||
exists: true
|
||||
};
|
||||
// check if /src/module.js -> <root>/src/module.js
|
||||
if (!id.startsWith(withTrailingSlash(root)) && id.startsWith("/")) {
|
||||
const resolved = pathe.resolve(root, id.slice(1));
|
||||
if (fs.existsSync(cleanUrl(resolved))) return {
|
||||
absolute: resolved,
|
||||
exists: true
|
||||
};
|
||||
} else if (id.startsWith(withTrailingSlash(root)) && fs.existsSync(cleanUrl(id))) return {
|
||||
absolute: id,
|
||||
exists: true
|
||||
};
|
||||
return {
|
||||
absolute: id,
|
||||
exists: false
|
||||
};
|
||||
})();
|
||||
if (absolute.startsWith("//")) absolute = absolute.slice(1);
|
||||
// disambiguate the `<UNIT>:/` on windows: see nodejs/node#31710
|
||||
return {
|
||||
path: isWindows && absolute.startsWith("/") ? slash(node_url.fileURLToPath(node_url.pathToFileURL(absolute.slice(1)).href)) : absolute,
|
||||
exists
|
||||
};
|
||||
}
|
||||
const NODE_BUILTIN_NAMESPACE = "node:";
|
||||
function isNodeBuiltin(id) {
|
||||
if (prefixedBuiltins.has(id)) return true;
|
||||
return builtins.has(id.startsWith(NODE_BUILTIN_NAMESPACE) ? id.slice(NODE_BUILTIN_NAMESPACE.length) : id);
|
||||
}
|
||||
/**
|
||||
* Convert `Arrayable<T>` to `Array<T>`
|
||||
*
|
||||
* @category Array
|
||||
*/
|
||||
function toArray(array) {
|
||||
if (array === null || array === void 0) array = [];
|
||||
if (Array.isArray(array)) return array;
|
||||
return [array];
|
||||
}
|
||||
function getCachedData(cache, basedir, originalBasedir) {
|
||||
const pkgData = cache.get(getFnpdCacheKey(basedir));
|
||||
if (pkgData) {
|
||||
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
|
||||
cache.set(getFnpdCacheKey(dir), pkgData);
|
||||
});
|
||||
return pkgData;
|
||||
}
|
||||
}
|
||||
function setCacheData(cache, data, basedir, originalBasedir) {
|
||||
cache.set(getFnpdCacheKey(basedir), data);
|
||||
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
|
||||
cache.set(getFnpdCacheKey(dir), data);
|
||||
});
|
||||
}
|
||||
function getFnpdCacheKey(basedir) {
|
||||
return `fnpd_${basedir}`;
|
||||
}
|
||||
/**
|
||||
* Traverse between `longerDir` (inclusive) and `shorterDir` (exclusive) and call `cb` for each dir.
|
||||
* @param longerDir Longer dir path, e.g. `/User/foo/bar/baz`
|
||||
* @param shorterDir Shorter dir path, e.g. `/User/foo`
|
||||
*/
|
||||
function traverseBetweenDirs(longerDir, shorterDir, cb) {
|
||||
while (longerDir !== shorterDir) {
|
||||
cb(longerDir);
|
||||
longerDir = pathe.dirname(longerDir);
|
||||
}
|
||||
}
|
||||
function withTrailingSlash(path) {
|
||||
if (path[path.length - 1] !== "/") return `${path}/`;
|
||||
return path;
|
||||
}
|
||||
function createImportMetaEnvProxy() {
|
||||
// packages/vitest/src/node/plugins/index.ts:146
|
||||
const booleanKeys = [
|
||||
"DEV",
|
||||
"PROD",
|
||||
"SSR"
|
||||
];
|
||||
return new Proxy(process.env, {
|
||||
get(_, key) {
|
||||
if (typeof key !== "string") return void 0;
|
||||
if (booleanKeys.includes(key)) return !!process.env[key];
|
||||
return process.env[key];
|
||||
},
|
||||
set(_, key, value) {
|
||||
if (typeof key !== "string") return true;
|
||||
if (booleanKeys.includes(key)) process.env[key] = value ? "1" : "";
|
||||
else process.env[key] = value;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
const packageCache = /* @__PURE__ */ new Map();
|
||||
async function findNearestPackageData(basedir) {
|
||||
const originalBasedir = basedir;
|
||||
while (basedir) {
|
||||
var _await$fsp$stat$catch;
|
||||
const cached = getCachedData(packageCache, basedir, originalBasedir);
|
||||
if (cached) return cached;
|
||||
const pkgPath = pathe.join(basedir, "package.json");
|
||||
if ((_await$fsp$stat$catch = await fs.promises.stat(pkgPath).catch(() => {})) === null || _await$fsp$stat$catch === void 0 ? void 0 : _await$fsp$stat$catch.isFile()) {
|
||||
const pkgData = JSON.parse(await fs.promises.readFile(pkgPath, "utf8"));
|
||||
if (packageCache) setCacheData(packageCache, pkgData, basedir, originalBasedir);
|
||||
return pkgData;
|
||||
}
|
||||
const nextBasedir = pathe.dirname(basedir);
|
||||
if (nextBasedir === basedir) break;
|
||||
basedir = nextBasedir;
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
exports.VALID_ID_PREFIX = VALID_ID_PREFIX;
|
||||
exports.cleanUrl = cleanUrl;
|
||||
exports.createImportMetaEnvProxy = createImportMetaEnvProxy;
|
||||
exports.findNearestPackageData = findNearestPackageData;
|
||||
exports.getCachedData = getCachedData;
|
||||
exports.isBareImport = isBareImport;
|
||||
exports.isInternalRequest = isInternalRequest;
|
||||
exports.isNodeBuiltin = isNodeBuiltin;
|
||||
exports.isPrimitive = isPrimitive;
|
||||
exports.isWindows = isWindows;
|
||||
exports.normalizeModuleId = normalizeModuleId;
|
||||
exports.normalizeRequestId = normalizeRequestId;
|
||||
exports.setCacheData = setCacheData;
|
||||
exports.slash = slash;
|
||||
exports.toArray = toArray;
|
||||
exports.toFilePath = toFilePath;
|
||||
exports.withTrailingSlash = withTrailingSlash;
|
||||
32
GTA_P_V2/node_modules/vite-node/dist/utils.d.ts
generated
vendored
Normal file
32
GTA_P_V2/node_modules/vite-node/dist/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
import { N as Nullable, A as Arrayable } from './index.d-DGmxD2U7.js';
|
||||
import './trace-mapping.d-DLVdEqOp.js';
|
||||
|
||||
declare const isWindows: boolean;
|
||||
declare function slash(str: string): string;
|
||||
declare function isBareImport(id: string): boolean;
|
||||
declare const VALID_ID_PREFIX = "/@id/";
|
||||
declare function normalizeRequestId(id: string, base?: string): string;
|
||||
declare function cleanUrl(url: string): string;
|
||||
declare function isInternalRequest(id: string): boolean;
|
||||
declare function normalizeModuleId(id: string): string;
|
||||
declare function isPrimitive(v: any): boolean;
|
||||
declare function toFilePath(id: string, root: string): {
|
||||
path: string
|
||||
exists: boolean
|
||||
};
|
||||
declare function isNodeBuiltin(id: string): boolean;
|
||||
/**
|
||||
* Convert `Arrayable<T>` to `Array<T>`
|
||||
*
|
||||
* @category Array
|
||||
*/
|
||||
declare function toArray<T>(array?: Nullable<Arrayable<T>>): Array<T>;
|
||||
declare function getCachedData<T>(cache: Map<string, T>, basedir: string, originalBasedir: string): NonNullable<T> | undefined;
|
||||
declare function setCacheData<T>(cache: Map<string, T>, data: T, basedir: string, originalBasedir: string): void;
|
||||
declare function withTrailingSlash(path: string): string;
|
||||
declare function createImportMetaEnvProxy(): NodeJS.ProcessEnv;
|
||||
declare function findNearestPackageData(basedir: string): Promise<{
|
||||
type?: "module" | "commonjs"
|
||||
}>;
|
||||
|
||||
export { VALID_ID_PREFIX, cleanUrl, createImportMetaEnvProxy, findNearestPackageData, getCachedData, isBareImport, isInternalRequest, isNodeBuiltin, isPrimitive, isWindows, normalizeModuleId, normalizeRequestId, setCacheData, slash, toArray, toFilePath, withTrailingSlash };
|
||||
198
GTA_P_V2/node_modules/vite-node/dist/utils.mjs
generated
vendored
Normal file
198
GTA_P_V2/node_modules/vite-node/dist/utils.mjs
generated
vendored
Normal file
@@ -0,0 +1,198 @@
|
||||
import { existsSync, promises } from 'node:fs';
|
||||
import { builtinModules } from 'node:module';
|
||||
import { fileURLToPath, pathToFileURL } from 'node:url';
|
||||
import { resolve, join, dirname } from 'pathe';
|
||||
|
||||
const isWindows = process.platform === "win32";
|
||||
const drive = isWindows ? process.cwd()[0] : null;
|
||||
const driveOpposite = drive ? drive === drive.toUpperCase() ? drive.toLowerCase() : drive.toUpperCase() : null;
|
||||
const driveRegexp = drive ? new RegExp(`(?:^|/@fs/)${drive}(\:[\\/])`) : null;
|
||||
const driveOppositeRegext = driveOpposite ? new RegExp(`(?:^|/@fs/)${driveOpposite}(\:[\\/])`) : null;
|
||||
function slash(str) {
|
||||
return str.replace(/\\/g, "/");
|
||||
}
|
||||
const bareImportRE = /^(?![a-z]:)[\w@](?!.*:\/\/)/i;
|
||||
function isBareImport(id) {
|
||||
return bareImportRE.test(id);
|
||||
}
|
||||
const VALID_ID_PREFIX = "/@id/";
|
||||
function normalizeRequestId(id, base) {
|
||||
if (base && id.startsWith(withTrailingSlash(base))) id = `/${id.slice(base.length)}`;
|
||||
// keep drive the same as in process cwd. ideally, this should be resolved on Vite side
|
||||
// Vite always resolves drive letters to the upper case because of the use of `realpathSync`
|
||||
// https://github.com/vitejs/vite/blob/0ab20a3ee26eacf302415b3087732497d0a2f358/packages/vite/src/node/utils.ts#L635
|
||||
if (driveRegexp && !(driveRegexp === null || driveRegexp === void 0 ? void 0 : driveRegexp.test(id)) && (driveOppositeRegext === null || driveOppositeRegext === void 0 ? void 0 : driveOppositeRegext.test(id))) id = id.replace(driveOppositeRegext, `${drive}$1`);
|
||||
if (id.startsWith("file://")) {
|
||||
// preserve hash/query
|
||||
const { file, postfix } = splitFileAndPostfix(id);
|
||||
return fileURLToPath(file) + postfix;
|
||||
}
|
||||
return id.replace(/^\/@id\/__x00__/, "\0").replace(/^\/@id\//, "").replace(/^__vite-browser-external:/, "").replace(/\?v=\w+/, "?").replace(/&v=\w+/, "").replace(/\?t=\w+/, "?").replace(/&t=\w+/, "").replace(/\?import/, "?").replace(/&import/, "").replace(/\?&/, "?").replace(/\?+$/, "");
|
||||
}
|
||||
const postfixRE = /[?#].*$/;
|
||||
function cleanUrl(url) {
|
||||
return url.replace(postfixRE, "");
|
||||
}
|
||||
function splitFileAndPostfix(path) {
|
||||
const file = cleanUrl(path);
|
||||
return {
|
||||
file,
|
||||
postfix: path.slice(file.length)
|
||||
};
|
||||
}
|
||||
const internalRequests = ["@vite/client", "@vite/env"];
|
||||
const internalRequestRegexp = new RegExp(`^/?(?:${internalRequests.join("|")})$`);
|
||||
function isInternalRequest(id) {
|
||||
return internalRequestRegexp.test(id);
|
||||
}
|
||||
// https://nodejs.org/api/modules.html#built-in-modules-with-mandatory-node-prefix
|
||||
const prefixedBuiltins = new Set([
|
||||
"node:sea",
|
||||
"node:sqlite",
|
||||
"node:test",
|
||||
"node:test/reporters"
|
||||
]);
|
||||
const builtins = new Set([
|
||||
...builtinModules,
|
||||
"assert/strict",
|
||||
"diagnostics_channel",
|
||||
"dns/promises",
|
||||
"fs/promises",
|
||||
"path/posix",
|
||||
"path/win32",
|
||||
"readline/promises",
|
||||
"stream/consumers",
|
||||
"stream/promises",
|
||||
"stream/web",
|
||||
"timers/promises",
|
||||
"util/types",
|
||||
"wasi"
|
||||
]);
|
||||
function normalizeModuleId(id) {
|
||||
// unique id that is not available as "test"
|
||||
if (prefixedBuiltins.has(id)) return id;
|
||||
if (id.startsWith("file://")) return fileURLToPath(id);
|
||||
return id.replace(/\\/g, "/").replace(/^\/@fs\//, isWindows ? "" : "/").replace(/^node:/, "").replace(/^\/+/, "/");
|
||||
}
|
||||
function isPrimitive(v) {
|
||||
return v !== Object(v);
|
||||
}
|
||||
function toFilePath(id, root) {
|
||||
let { absolute, exists } = (() => {
|
||||
if (id.startsWith("/@fs/")) return {
|
||||
absolute: id.slice(4),
|
||||
exists: true
|
||||
};
|
||||
// check if /src/module.js -> <root>/src/module.js
|
||||
if (!id.startsWith(withTrailingSlash(root)) && id.startsWith("/")) {
|
||||
const resolved = resolve(root, id.slice(1));
|
||||
if (existsSync(cleanUrl(resolved))) return {
|
||||
absolute: resolved,
|
||||
exists: true
|
||||
};
|
||||
} else if (id.startsWith(withTrailingSlash(root)) && existsSync(cleanUrl(id))) return {
|
||||
absolute: id,
|
||||
exists: true
|
||||
};
|
||||
return {
|
||||
absolute: id,
|
||||
exists: false
|
||||
};
|
||||
})();
|
||||
if (absolute.startsWith("//")) absolute = absolute.slice(1);
|
||||
// disambiguate the `<UNIT>:/` on windows: see nodejs/node#31710
|
||||
return {
|
||||
path: isWindows && absolute.startsWith("/") ? slash(fileURLToPath(pathToFileURL(absolute.slice(1)).href)) : absolute,
|
||||
exists
|
||||
};
|
||||
}
|
||||
const NODE_BUILTIN_NAMESPACE = "node:";
|
||||
function isNodeBuiltin(id) {
|
||||
if (prefixedBuiltins.has(id)) return true;
|
||||
return builtins.has(id.startsWith(NODE_BUILTIN_NAMESPACE) ? id.slice(NODE_BUILTIN_NAMESPACE.length) : id);
|
||||
}
|
||||
/**
|
||||
* Convert `Arrayable<T>` to `Array<T>`
|
||||
*
|
||||
* @category Array
|
||||
*/
|
||||
function toArray(array) {
|
||||
if (array === null || array === void 0) array = [];
|
||||
if (Array.isArray(array)) return array;
|
||||
return [array];
|
||||
}
|
||||
function getCachedData(cache, basedir, originalBasedir) {
|
||||
const pkgData = cache.get(getFnpdCacheKey(basedir));
|
||||
if (pkgData) {
|
||||
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
|
||||
cache.set(getFnpdCacheKey(dir), pkgData);
|
||||
});
|
||||
return pkgData;
|
||||
}
|
||||
}
|
||||
function setCacheData(cache, data, basedir, originalBasedir) {
|
||||
cache.set(getFnpdCacheKey(basedir), data);
|
||||
traverseBetweenDirs(originalBasedir, basedir, (dir) => {
|
||||
cache.set(getFnpdCacheKey(dir), data);
|
||||
});
|
||||
}
|
||||
function getFnpdCacheKey(basedir) {
|
||||
return `fnpd_${basedir}`;
|
||||
}
|
||||
/**
|
||||
* Traverse between `longerDir` (inclusive) and `shorterDir` (exclusive) and call `cb` for each dir.
|
||||
* @param longerDir Longer dir path, e.g. `/User/foo/bar/baz`
|
||||
* @param shorterDir Shorter dir path, e.g. `/User/foo`
|
||||
*/
|
||||
function traverseBetweenDirs(longerDir, shorterDir, cb) {
|
||||
while (longerDir !== shorterDir) {
|
||||
cb(longerDir);
|
||||
longerDir = dirname(longerDir);
|
||||
}
|
||||
}
|
||||
function withTrailingSlash(path) {
|
||||
if (path[path.length - 1] !== "/") return `${path}/`;
|
||||
return path;
|
||||
}
|
||||
function createImportMetaEnvProxy() {
|
||||
// packages/vitest/src/node/plugins/index.ts:146
|
||||
const booleanKeys = [
|
||||
"DEV",
|
||||
"PROD",
|
||||
"SSR"
|
||||
];
|
||||
return new Proxy(process.env, {
|
||||
get(_, key) {
|
||||
if (typeof key !== "string") return void 0;
|
||||
if (booleanKeys.includes(key)) return !!process.env[key];
|
||||
return process.env[key];
|
||||
},
|
||||
set(_, key, value) {
|
||||
if (typeof key !== "string") return true;
|
||||
if (booleanKeys.includes(key)) process.env[key] = value ? "1" : "";
|
||||
else process.env[key] = value;
|
||||
return true;
|
||||
}
|
||||
});
|
||||
}
|
||||
const packageCache = /* @__PURE__ */ new Map();
|
||||
async function findNearestPackageData(basedir) {
|
||||
const originalBasedir = basedir;
|
||||
while (basedir) {
|
||||
var _await$fsp$stat$catch;
|
||||
const cached = getCachedData(packageCache, basedir, originalBasedir);
|
||||
if (cached) return cached;
|
||||
const pkgPath = join(basedir, "package.json");
|
||||
if ((_await$fsp$stat$catch = await promises.stat(pkgPath).catch(() => {})) === null || _await$fsp$stat$catch === void 0 ? void 0 : _await$fsp$stat$catch.isFile()) {
|
||||
const pkgData = JSON.parse(await promises.readFile(pkgPath, "utf8"));
|
||||
if (packageCache) setCacheData(packageCache, pkgData, basedir, originalBasedir);
|
||||
return pkgData;
|
||||
}
|
||||
const nextBasedir = dirname(basedir);
|
||||
if (nextBasedir === basedir) break;
|
||||
basedir = nextBasedir;
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
export { VALID_ID_PREFIX, cleanUrl, createImportMetaEnvProxy, findNearestPackageData, getCachedData, isBareImport, isInternalRequest, isNodeBuiltin, isPrimitive, isWindows, normalizeModuleId, normalizeRequestId, setCacheData, slash, toArray, toFilePath, withTrailingSlash };
|
||||
Reference in New Issue
Block a user