full site update
This commit is contained in:
21
node_modules/unstorage/LICENSE
generated
vendored
Normal file
21
node_modules/unstorage/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Pooya Parsa <pooya@pi0.io>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
102
node_modules/unstorage/README.md
generated
vendored
Normal file
102
node_modules/unstorage/README.md
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
# 💾 Unstorage
|
||||
|
||||
[![npm version][npm-version-src]][npm-version-href]
|
||||
[![npm downloads][npm-downloads-src]][npm-downloads-href]
|
||||
[![Codecov][codecov-src]][codecov-href]
|
||||
[![bundle][bundle-src]][bundle-href]
|
||||
[![License][license-src]][license-href]
|
||||
|
||||
<!--[![Github Actions][github-actions-src]][github-actions-href]-->
|
||||
|
||||
Unstorage provides an async Key-Value storage API with conventional features like multi driver mounting, watching and working with metadata, dozens of built-in drivers and a [tiny core](https://bundlephobia.com/package/unstorage).
|
||||
|
||||
👉 [Documentation](https://unstorage.unjs.io)
|
||||
|
||||
## Features
|
||||
|
||||
- Designed for all environments: Browser, NodeJS, and Workers
|
||||
- Lots of Built-in drivers
|
||||
- Asynchronous API
|
||||
- Unix-style driver mounting to combine storages
|
||||
- Default [in-memory](https://unstorage.unjs.io/drivers/memory) storage
|
||||
- Tree-shakable utils and tiny core
|
||||
- Auto JSON value serialization and deserialization
|
||||
- Binary and raw value support
|
||||
- State [snapshots](https://unstorage.unjs.io/getting-started/utils#snapshots) and hydration
|
||||
- Storage watcher
|
||||
- HTTP Storage with [built-in server](https://unstorage.unjs.io/guide/http-server)
|
||||
|
||||
## Usage
|
||||
|
||||
Install `unstorage` npm package:
|
||||
|
||||
```sh
|
||||
# yarn
|
||||
yarn add unstorage
|
||||
|
||||
# npm
|
||||
npm install unstorage
|
||||
|
||||
# pnpm
|
||||
pnpm add unstorage
|
||||
```
|
||||
|
||||
```js
|
||||
import { createStorage } from "unstorage";
|
||||
|
||||
const storage = createStorage(/* opts */);
|
||||
|
||||
await storage.getItem("foo:bar"); // or storage.getItem('/foo/bar')
|
||||
```
|
||||
|
||||
👉 Check out the [the documentation](https://unstorage.unjs.io) for usage information.
|
||||
|
||||
## Nightly release channel
|
||||
|
||||
You can use the nightly release channel to try the latest changes in the `main` branch via [`unstorage-nightly`](https://www.npmjs.com/package/unstorage-nightly).
|
||||
|
||||
If directly using `unstorage` in your project:
|
||||
|
||||
```json
|
||||
{
|
||||
"devDependencies": {
|
||||
"unstorage": "npm:unstorage-nightly"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If using `unstorage` via another tool in your project:
|
||||
|
||||
```json
|
||||
{
|
||||
"resolutions": {
|
||||
"unstorage": "npm:unstorage-nightly"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Contribution
|
||||
|
||||
- Clone repository
|
||||
- Install dependencies with `pnpm install`
|
||||
- Use `pnpm dev` to start jest watcher verifying changes
|
||||
- Use `pnpm test` before pushing to ensure all tests and lint checks passing
|
||||
|
||||
## License
|
||||
|
||||
[MIT](./LICENSE)
|
||||
|
||||
<!-- Badges -->
|
||||
|
||||
[npm-version-src]: https://img.shields.io/npm/v/unstorage?style=flat&colorA=18181B&colorB=F0DB4F
|
||||
[npm-version-href]: https://npmjs.com/package/unstorage
|
||||
[npm-downloads-src]: https://img.shields.io/npm/dm/unstorage?style=flat&colorA=18181B&colorB=F0DB4F
|
||||
[npm-downloads-href]: https://npmjs.com/package/unstorage
|
||||
[github-actions-src]: https://img.shields.io/github/workflow/status/unjs/unstorage/ci/main?style=flat&colorA=18181B&colorB=F0DB4F
|
||||
[github-actions-href]: https://github.com/unjs/unstorage/actions?query=workflow%3Aci
|
||||
[codecov-src]: https://img.shields.io/codecov/c/gh/unjs/unstorage/main?style=flat&colorA=18181B&colorB=F0DB4F
|
||||
[codecov-href]: https://codecov.io/gh/unjs/unstorage
|
||||
[bundle-src]: https://img.shields.io/bundlephobia/minzip/unstorage?style=flat&colorA=18181B&colorB=F0DB4F
|
||||
[bundle-href]: https://bundlephobia.com/result?p=unstorage
|
||||
[license-src]: https://img.shields.io/github/license/unjs/unstorage.svg?style=flat&colorA=18181B&colorB=F0DB4F
|
||||
[license-href]: https://github.com/unjs/unstorage/blob/main/LICENSE
|
530
node_modules/unstorage/dist/index.cjs
generated
vendored
Normal file
530
node_modules/unstorage/dist/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,530 @@
|
||||
'use strict';
|
||||
|
||||
const destr = require('destr');
|
||||
const utils = require('./shared/unstorage.DgtRghtF.cjs');
|
||||
|
||||
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
|
||||
|
||||
const destr__default = /*#__PURE__*/_interopDefaultCompat(destr);
|
||||
|
||||
function defineDriver(factory) {
|
||||
return factory;
|
||||
}
|
||||
|
||||
const DRIVER_NAME = "memory";
|
||||
const memory = defineDriver(() => {
|
||||
const data = /* @__PURE__ */ new Map();
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
getInstance: () => data,
|
||||
hasItem(key) {
|
||||
return data.has(key);
|
||||
},
|
||||
getItem(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
data.delete(key);
|
||||
},
|
||||
getKeys() {
|
||||
return [...data.keys()];
|
||||
},
|
||||
clear() {
|
||||
data.clear();
|
||||
},
|
||||
dispose() {
|
||||
data.clear();
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
function createStorage(options = {}) {
|
||||
const context = {
|
||||
mounts: { "": options.driver || memory() },
|
||||
mountpoints: [""],
|
||||
watching: false,
|
||||
watchListeners: [],
|
||||
unwatch: {}
|
||||
};
|
||||
const getMount = (key) => {
|
||||
for (const base of context.mountpoints) {
|
||||
if (key.startsWith(base)) {
|
||||
return {
|
||||
base,
|
||||
relativeKey: key.slice(base.length),
|
||||
driver: context.mounts[base]
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
base: "",
|
||||
relativeKey: key,
|
||||
driver: context.mounts[""]
|
||||
};
|
||||
};
|
||||
const getMounts = (base, includeParent) => {
|
||||
return context.mountpoints.filter(
|
||||
(mountpoint) => mountpoint.startsWith(base) || includeParent && base.startsWith(mountpoint)
|
||||
).map((mountpoint) => ({
|
||||
relativeBase: base.length > mountpoint.length ? base.slice(mountpoint.length) : void 0,
|
||||
mountpoint,
|
||||
driver: context.mounts[mountpoint]
|
||||
}));
|
||||
};
|
||||
const onChange = (event, key) => {
|
||||
if (!context.watching) {
|
||||
return;
|
||||
}
|
||||
key = utils.normalizeKey(key);
|
||||
for (const listener of context.watchListeners) {
|
||||
listener(event, key);
|
||||
}
|
||||
};
|
||||
const startWatch = async () => {
|
||||
if (context.watching) {
|
||||
return;
|
||||
}
|
||||
context.watching = true;
|
||||
for (const mountpoint in context.mounts) {
|
||||
context.unwatch[mountpoint] = await watch(
|
||||
context.mounts[mountpoint],
|
||||
onChange,
|
||||
mountpoint
|
||||
);
|
||||
}
|
||||
};
|
||||
const stopWatch = async () => {
|
||||
if (!context.watching) {
|
||||
return;
|
||||
}
|
||||
for (const mountpoint in context.unwatch) {
|
||||
await context.unwatch[mountpoint]();
|
||||
}
|
||||
context.unwatch = {};
|
||||
context.watching = false;
|
||||
};
|
||||
const runBatch = (items, commonOptions, cb) => {
|
||||
const batches = /* @__PURE__ */ new Map();
|
||||
const getBatch = (mount) => {
|
||||
let batch = batches.get(mount.base);
|
||||
if (!batch) {
|
||||
batch = {
|
||||
driver: mount.driver,
|
||||
base: mount.base,
|
||||
items: []
|
||||
};
|
||||
batches.set(mount.base, batch);
|
||||
}
|
||||
return batch;
|
||||
};
|
||||
for (const item of items) {
|
||||
const isStringItem = typeof item === "string";
|
||||
const key = utils.normalizeKey(isStringItem ? item : item.key);
|
||||
const value = isStringItem ? void 0 : item.value;
|
||||
const options2 = isStringItem || !item.options ? commonOptions : { ...commonOptions, ...item.options };
|
||||
const mount = getMount(key);
|
||||
getBatch(mount).items.push({
|
||||
key,
|
||||
value,
|
||||
relativeKey: mount.relativeKey,
|
||||
options: options2
|
||||
});
|
||||
}
|
||||
return Promise.all([...batches.values()].map((batch) => cb(batch))).then(
|
||||
(r) => r.flat()
|
||||
);
|
||||
};
|
||||
const storage = {
|
||||
// Item
|
||||
hasItem(key, opts = {}) {
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
return utils.asyncCall(driver.hasItem, relativeKey, opts);
|
||||
},
|
||||
getItem(key, opts = {}) {
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
return utils.asyncCall(driver.getItem, relativeKey, opts).then(
|
||||
(value) => destr__default(value)
|
||||
);
|
||||
},
|
||||
getItems(items, commonOptions = {}) {
|
||||
return runBatch(items, commonOptions, (batch) => {
|
||||
if (batch.driver.getItems) {
|
||||
return utils.asyncCall(
|
||||
batch.driver.getItems,
|
||||
batch.items.map((item) => ({
|
||||
key: item.relativeKey,
|
||||
options: item.options
|
||||
})),
|
||||
commonOptions
|
||||
).then(
|
||||
(r) => r.map((item) => ({
|
||||
key: utils.joinKeys(batch.base, item.key),
|
||||
value: destr__default(item.value)
|
||||
}))
|
||||
);
|
||||
}
|
||||
return Promise.all(
|
||||
batch.items.map((item) => {
|
||||
return utils.asyncCall(
|
||||
batch.driver.getItem,
|
||||
item.relativeKey,
|
||||
item.options
|
||||
).then((value) => ({
|
||||
key: item.key,
|
||||
value: destr__default(value)
|
||||
}));
|
||||
})
|
||||
);
|
||||
});
|
||||
},
|
||||
getItemRaw(key, opts = {}) {
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (driver.getItemRaw) {
|
||||
return utils.asyncCall(driver.getItemRaw, relativeKey, opts);
|
||||
}
|
||||
return utils.asyncCall(driver.getItem, relativeKey, opts).then(
|
||||
(value) => utils.deserializeRaw(value)
|
||||
);
|
||||
},
|
||||
async setItem(key, value, opts = {}) {
|
||||
if (value === void 0) {
|
||||
return storage.removeItem(key);
|
||||
}
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (!driver.setItem) {
|
||||
return;
|
||||
}
|
||||
await utils.asyncCall(driver.setItem, relativeKey, utils.stringify(value), opts);
|
||||
if (!driver.watch) {
|
||||
onChange("update", key);
|
||||
}
|
||||
},
|
||||
async setItems(items, commonOptions) {
|
||||
await runBatch(items, commonOptions, async (batch) => {
|
||||
if (batch.driver.setItems) {
|
||||
return utils.asyncCall(
|
||||
batch.driver.setItems,
|
||||
batch.items.map((item) => ({
|
||||
key: item.relativeKey,
|
||||
value: utils.stringify(item.value),
|
||||
options: item.options
|
||||
})),
|
||||
commonOptions
|
||||
);
|
||||
}
|
||||
if (!batch.driver.setItem) {
|
||||
return;
|
||||
}
|
||||
await Promise.all(
|
||||
batch.items.map((item) => {
|
||||
return utils.asyncCall(
|
||||
batch.driver.setItem,
|
||||
item.relativeKey,
|
||||
utils.stringify(item.value),
|
||||
item.options
|
||||
);
|
||||
})
|
||||
);
|
||||
});
|
||||
},
|
||||
async setItemRaw(key, value, opts = {}) {
|
||||
if (value === void 0) {
|
||||
return storage.removeItem(key, opts);
|
||||
}
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (driver.setItemRaw) {
|
||||
await utils.asyncCall(driver.setItemRaw, relativeKey, value, opts);
|
||||
} else if (driver.setItem) {
|
||||
await utils.asyncCall(driver.setItem, relativeKey, utils.serializeRaw(value), opts);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
if (!driver.watch) {
|
||||
onChange("update", key);
|
||||
}
|
||||
},
|
||||
async removeItem(key, opts = {}) {
|
||||
if (typeof opts === "boolean") {
|
||||
opts = { removeMeta: opts };
|
||||
}
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (!driver.removeItem) {
|
||||
return;
|
||||
}
|
||||
await utils.asyncCall(driver.removeItem, relativeKey, opts);
|
||||
if (opts.removeMeta || opts.removeMata) {
|
||||
await utils.asyncCall(driver.removeItem, relativeKey + "$", opts);
|
||||
}
|
||||
if (!driver.watch) {
|
||||
onChange("remove", key);
|
||||
}
|
||||
},
|
||||
// Meta
|
||||
async getMeta(key, opts = {}) {
|
||||
if (typeof opts === "boolean") {
|
||||
opts = { nativeOnly: opts };
|
||||
}
|
||||
key = utils.normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
const meta = /* @__PURE__ */ Object.create(null);
|
||||
if (driver.getMeta) {
|
||||
Object.assign(meta, await utils.asyncCall(driver.getMeta, relativeKey, opts));
|
||||
}
|
||||
if (!opts.nativeOnly) {
|
||||
const value = await utils.asyncCall(
|
||||
driver.getItem,
|
||||
relativeKey + "$",
|
||||
opts
|
||||
).then((value_) => destr__default(value_));
|
||||
if (value && typeof value === "object") {
|
||||
if (typeof value.atime === "string") {
|
||||
value.atime = new Date(value.atime);
|
||||
}
|
||||
if (typeof value.mtime === "string") {
|
||||
value.mtime = new Date(value.mtime);
|
||||
}
|
||||
Object.assign(meta, value);
|
||||
}
|
||||
}
|
||||
return meta;
|
||||
},
|
||||
setMeta(key, value, opts = {}) {
|
||||
return this.setItem(key + "$", value, opts);
|
||||
},
|
||||
removeMeta(key, opts = {}) {
|
||||
return this.removeItem(key + "$", opts);
|
||||
},
|
||||
// Keys
|
||||
async getKeys(base, opts = {}) {
|
||||
base = utils.normalizeBaseKey(base);
|
||||
const mounts = getMounts(base, true);
|
||||
let maskedMounts = [];
|
||||
const allKeys = [];
|
||||
let allMountsSupportMaxDepth = true;
|
||||
for (const mount of mounts) {
|
||||
if (!mount.driver.flags?.maxDepth) {
|
||||
allMountsSupportMaxDepth = false;
|
||||
}
|
||||
const rawKeys = await utils.asyncCall(
|
||||
mount.driver.getKeys,
|
||||
mount.relativeBase,
|
||||
opts
|
||||
);
|
||||
for (const key of rawKeys) {
|
||||
const fullKey = mount.mountpoint + utils.normalizeKey(key);
|
||||
if (!maskedMounts.some((p) => fullKey.startsWith(p))) {
|
||||
allKeys.push(fullKey);
|
||||
}
|
||||
}
|
||||
maskedMounts = [
|
||||
mount.mountpoint,
|
||||
...maskedMounts.filter((p) => !p.startsWith(mount.mountpoint))
|
||||
];
|
||||
}
|
||||
const shouldFilterByDepth = opts.maxDepth !== void 0 && !allMountsSupportMaxDepth;
|
||||
return allKeys.filter(
|
||||
(key) => (!shouldFilterByDepth || utils.filterKeyByDepth(key, opts.maxDepth)) && utils.filterKeyByBase(key, base)
|
||||
);
|
||||
},
|
||||
// Utils
|
||||
async clear(base, opts = {}) {
|
||||
base = utils.normalizeBaseKey(base);
|
||||
await Promise.all(
|
||||
getMounts(base, false).map(async (m) => {
|
||||
if (m.driver.clear) {
|
||||
return utils.asyncCall(m.driver.clear, m.relativeBase, opts);
|
||||
}
|
||||
if (m.driver.removeItem) {
|
||||
const keys = await m.driver.getKeys(m.relativeBase || "", opts);
|
||||
return Promise.all(
|
||||
keys.map((key) => m.driver.removeItem(key, opts))
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
async dispose() {
|
||||
await Promise.all(
|
||||
Object.values(context.mounts).map((driver) => dispose(driver))
|
||||
);
|
||||
},
|
||||
async watch(callback) {
|
||||
await startWatch();
|
||||
context.watchListeners.push(callback);
|
||||
return async () => {
|
||||
context.watchListeners = context.watchListeners.filter(
|
||||
(listener) => listener !== callback
|
||||
);
|
||||
if (context.watchListeners.length === 0) {
|
||||
await stopWatch();
|
||||
}
|
||||
};
|
||||
},
|
||||
async unwatch() {
|
||||
context.watchListeners = [];
|
||||
await stopWatch();
|
||||
},
|
||||
// Mount
|
||||
mount(base, driver) {
|
||||
base = utils.normalizeBaseKey(base);
|
||||
if (base && context.mounts[base]) {
|
||||
throw new Error(`already mounted at ${base}`);
|
||||
}
|
||||
if (base) {
|
||||
context.mountpoints.push(base);
|
||||
context.mountpoints.sort((a, b) => b.length - a.length);
|
||||
}
|
||||
context.mounts[base] = driver;
|
||||
if (context.watching) {
|
||||
Promise.resolve(watch(driver, onChange, base)).then((unwatcher) => {
|
||||
context.unwatch[base] = unwatcher;
|
||||
}).catch(console.error);
|
||||
}
|
||||
return storage;
|
||||
},
|
||||
async unmount(base, _dispose = true) {
|
||||
base = utils.normalizeBaseKey(base);
|
||||
if (!base || !context.mounts[base]) {
|
||||
return;
|
||||
}
|
||||
if (context.watching && base in context.unwatch) {
|
||||
context.unwatch[base]?.();
|
||||
delete context.unwatch[base];
|
||||
}
|
||||
if (_dispose) {
|
||||
await dispose(context.mounts[base]);
|
||||
}
|
||||
context.mountpoints = context.mountpoints.filter((key) => key !== base);
|
||||
delete context.mounts[base];
|
||||
},
|
||||
getMount(key = "") {
|
||||
key = utils.normalizeKey(key) + ":";
|
||||
const m = getMount(key);
|
||||
return {
|
||||
driver: m.driver,
|
||||
base: m.base
|
||||
};
|
||||
},
|
||||
getMounts(base = "", opts = {}) {
|
||||
base = utils.normalizeKey(base);
|
||||
const mounts = getMounts(base, opts.parents);
|
||||
return mounts.map((m) => ({
|
||||
driver: m.driver,
|
||||
base: m.mountpoint
|
||||
}));
|
||||
},
|
||||
// Aliases
|
||||
keys: (base, opts = {}) => storage.getKeys(base, opts),
|
||||
get: (key, opts = {}) => storage.getItem(key, opts),
|
||||
set: (key, value, opts = {}) => storage.setItem(key, value, opts),
|
||||
has: (key, opts = {}) => storage.hasItem(key, opts),
|
||||
del: (key, opts = {}) => storage.removeItem(key, opts),
|
||||
remove: (key, opts = {}) => storage.removeItem(key, opts)
|
||||
};
|
||||
return storage;
|
||||
}
|
||||
async function snapshot(storage, base) {
|
||||
base = utils.normalizeBaseKey(base);
|
||||
const keys = await storage.getKeys(base);
|
||||
const snapshot2 = {};
|
||||
await Promise.all(
|
||||
keys.map(async (key) => {
|
||||
snapshot2[key.slice(base.length)] = await storage.getItem(key);
|
||||
})
|
||||
);
|
||||
return snapshot2;
|
||||
}
|
||||
async function restoreSnapshot(driver, snapshot2, base = "") {
|
||||
base = utils.normalizeBaseKey(base);
|
||||
await Promise.all(
|
||||
Object.entries(snapshot2).map((e) => driver.setItem(base + e[0], e[1]))
|
||||
);
|
||||
}
|
||||
function watch(driver, onChange, base) {
|
||||
return driver.watch ? driver.watch((event, key) => onChange(event, base + key)) : () => {
|
||||
};
|
||||
}
|
||||
async function dispose(driver) {
|
||||
if (typeof driver.dispose === "function") {
|
||||
await utils.asyncCall(driver.dispose);
|
||||
}
|
||||
}
|
||||
|
||||
const builtinDrivers = {
|
||||
"azure-app-configuration": "unstorage/drivers/azure-app-configuration",
|
||||
"azureAppConfiguration": "unstorage/drivers/azure-app-configuration",
|
||||
"azure-cosmos": "unstorage/drivers/azure-cosmos",
|
||||
"azureCosmos": "unstorage/drivers/azure-cosmos",
|
||||
"azure-key-vault": "unstorage/drivers/azure-key-vault",
|
||||
"azureKeyVault": "unstorage/drivers/azure-key-vault",
|
||||
"azure-storage-blob": "unstorage/drivers/azure-storage-blob",
|
||||
"azureStorageBlob": "unstorage/drivers/azure-storage-blob",
|
||||
"azure-storage-table": "unstorage/drivers/azure-storage-table",
|
||||
"azureStorageTable": "unstorage/drivers/azure-storage-table",
|
||||
"capacitor-preferences": "unstorage/drivers/capacitor-preferences",
|
||||
"capacitorPreferences": "unstorage/drivers/capacitor-preferences",
|
||||
"cloudflare-kv-binding": "unstorage/drivers/cloudflare-kv-binding",
|
||||
"cloudflareKVBinding": "unstorage/drivers/cloudflare-kv-binding",
|
||||
"cloudflare-kv-http": "unstorage/drivers/cloudflare-kv-http",
|
||||
"cloudflareKVHttp": "unstorage/drivers/cloudflare-kv-http",
|
||||
"cloudflare-r2-binding": "unstorage/drivers/cloudflare-r2-binding",
|
||||
"cloudflareR2Binding": "unstorage/drivers/cloudflare-r2-binding",
|
||||
"db0": "unstorage/drivers/db0",
|
||||
"deno-kv-node": "unstorage/drivers/deno-kv-node",
|
||||
"denoKVNode": "unstorage/drivers/deno-kv-node",
|
||||
"deno-kv": "unstorage/drivers/deno-kv",
|
||||
"denoKV": "unstorage/drivers/deno-kv",
|
||||
"fs-lite": "unstorage/drivers/fs-lite",
|
||||
"fsLite": "unstorage/drivers/fs-lite",
|
||||
"fs": "unstorage/drivers/fs",
|
||||
"github": "unstorage/drivers/github",
|
||||
"http": "unstorage/drivers/http",
|
||||
"indexedb": "unstorage/drivers/indexedb",
|
||||
"localstorage": "unstorage/drivers/localstorage",
|
||||
"lru-cache": "unstorage/drivers/lru-cache",
|
||||
"lruCache": "unstorage/drivers/lru-cache",
|
||||
"memory": "unstorage/drivers/memory",
|
||||
"mongodb": "unstorage/drivers/mongodb",
|
||||
"netlify-blobs": "unstorage/drivers/netlify-blobs",
|
||||
"netlifyBlobs": "unstorage/drivers/netlify-blobs",
|
||||
"null": "unstorage/drivers/null",
|
||||
"overlay": "unstorage/drivers/overlay",
|
||||
"planetscale": "unstorage/drivers/planetscale",
|
||||
"redis": "unstorage/drivers/redis",
|
||||
"s3": "unstorage/drivers/s3",
|
||||
"session-storage": "unstorage/drivers/session-storage",
|
||||
"sessionStorage": "unstorage/drivers/session-storage",
|
||||
"uploadthing": "unstorage/drivers/uploadthing",
|
||||
"upstash": "unstorage/drivers/upstash",
|
||||
"vercel-blob": "unstorage/drivers/vercel-blob",
|
||||
"vercelBlob": "unstorage/drivers/vercel-blob",
|
||||
"vercel-kv": "unstorage/drivers/vercel-kv",
|
||||
"vercelKV": "unstorage/drivers/vercel-kv"
|
||||
};
|
||||
|
||||
exports.filterKeyByBase = utils.filterKeyByBase;
|
||||
exports.filterKeyByDepth = utils.filterKeyByDepth;
|
||||
exports.joinKeys = utils.joinKeys;
|
||||
exports.normalizeBaseKey = utils.normalizeBaseKey;
|
||||
exports.normalizeKey = utils.normalizeKey;
|
||||
exports.prefixStorage = utils.prefixStorage;
|
||||
exports.builtinDrivers = builtinDrivers;
|
||||
exports.createStorage = createStorage;
|
||||
exports.defineDriver = defineDriver;
|
||||
exports.restoreSnapshot = restoreSnapshot;
|
||||
exports.snapshot = snapshot;
|
155
node_modules/unstorage/dist/index.d.cts
generated
vendored
Normal file
155
node_modules/unstorage/dist/index.d.cts
generated
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
import { D as Driver, S as StorageValue, a as Storage } from './shared/unstorage.Ca7R4QL2.cjs';
|
||||
export { d as DriverFlags, G as GetKeysOptions, c as StorageMeta, T as TransactionOptions, U as Unwatch, b as WatchCallback, W as WatchEvent } from './shared/unstorage.Ca7R4QL2.cjs';
|
||||
import { AzureAppConfigurationOptions } from 'unstorage/drivers/azure-app-configuration';
|
||||
import { AzureCosmosOptions } from 'unstorage/drivers/azure-cosmos';
|
||||
import { AzureKeyVaultOptions } from 'unstorage/drivers/azure-key-vault';
|
||||
import { AzureStorageBlobOptions } from 'unstorage/drivers/azure-storage-blob';
|
||||
import { AzureStorageTableOptions } from 'unstorage/drivers/azure-storage-table';
|
||||
import { CapacitorPreferencesOptions } from 'unstorage/drivers/capacitor-preferences';
|
||||
import { KVOptions } from 'unstorage/drivers/cloudflare-kv-binding';
|
||||
import { KVHTTPOptions } from 'unstorage/drivers/cloudflare-kv-http';
|
||||
import { CloudflareR2Options } from 'unstorage/drivers/cloudflare-r2-binding';
|
||||
import { DB0DriverOptions } from 'unstorage/drivers/db0';
|
||||
import { DenoKvNodeOptions } from 'unstorage/drivers/deno-kv-node';
|
||||
import { DenoKvOptions } from 'unstorage/drivers/deno-kv';
|
||||
import { FSStorageOptions } from 'unstorage/drivers/fs-lite';
|
||||
import { FSStorageOptions as FSStorageOptions$1 } from 'unstorage/drivers/fs';
|
||||
import { GithubOptions } from 'unstorage/drivers/github';
|
||||
import { HTTPOptions } from 'unstorage/drivers/http';
|
||||
import { IDBKeyvalOptions } from 'unstorage/drivers/indexedb';
|
||||
import { LocalStorageOptions } from 'unstorage/drivers/localstorage';
|
||||
import { LRUDriverOptions } from 'unstorage/drivers/lru-cache';
|
||||
import { MongoDbOptions } from 'unstorage/drivers/mongodb';
|
||||
import { NetlifyStoreOptions } from 'unstorage/drivers/netlify-blobs';
|
||||
import { OverlayStorageOptions } from 'unstorage/drivers/overlay';
|
||||
import { PlanetscaleDriverOptions } from 'unstorage/drivers/planetscale';
|
||||
import { RedisOptions } from 'unstorage/drivers/redis';
|
||||
import { S3DriverOptions } from 'unstorage/drivers/s3';
|
||||
import { SessionStorageOptions } from 'unstorage/drivers/session-storage';
|
||||
import { UploadThingOptions } from 'unstorage/drivers/uploadthing';
|
||||
import { UpstashOptions } from 'unstorage/drivers/upstash';
|
||||
import { VercelBlobOptions } from 'unstorage/drivers/vercel-blob';
|
||||
import { VercelKVOptions } from 'unstorage/drivers/vercel-kv';
|
||||
|
||||
interface CreateStorageOptions {
|
||||
driver?: Driver;
|
||||
}
|
||||
declare function createStorage<T extends StorageValue>(options?: CreateStorageOptions): Storage<T>;
|
||||
type Snapshot<T = string> = Record<string, T>;
|
||||
declare function snapshot(storage: Storage, base: string): Promise<Snapshot<string>>;
|
||||
declare function restoreSnapshot(driver: Storage, snapshot: Snapshot<StorageValue>, base?: string): Promise<void>;
|
||||
|
||||
declare function prefixStorage<T extends StorageValue>(storage: Storage<T> | Storage<any>, base: string): Storage<T>;
|
||||
declare function normalizeKey(key?: string): string;
|
||||
declare function joinKeys(...keys: string[]): string;
|
||||
declare function normalizeBaseKey(base?: string): string;
|
||||
declare function filterKeyByDepth(key: string, depth: number | undefined): boolean;
|
||||
declare function filterKeyByBase(key: string, base: string | undefined): boolean;
|
||||
|
||||
type DriverFactory<OptionsT, InstanceT> = (opts: OptionsT) => Driver<OptionsT, InstanceT>;
|
||||
declare function defineDriver<OptionsT = any, InstanceT = never>(factory: DriverFactory<OptionsT, InstanceT>): DriverFactory<OptionsT, InstanceT>;
|
||||
|
||||
type BuiltinDriverName = "azure-app-configuration" | "azureAppConfiguration" | "azure-cosmos" | "azureCosmos" | "azure-key-vault" | "azureKeyVault" | "azure-storage-blob" | "azureStorageBlob" | "azure-storage-table" | "azureStorageTable" | "capacitor-preferences" | "capacitorPreferences" | "cloudflare-kv-binding" | "cloudflareKVBinding" | "cloudflare-kv-http" | "cloudflareKVHttp" | "cloudflare-r2-binding" | "cloudflareR2Binding" | "db0" | "deno-kv-node" | "denoKVNode" | "deno-kv" | "denoKV" | "fs-lite" | "fsLite" | "fs" | "github" | "http" | "indexedb" | "localstorage" | "lru-cache" | "lruCache" | "memory" | "mongodb" | "netlify-blobs" | "netlifyBlobs" | "null" | "overlay" | "planetscale" | "redis" | "s3" | "session-storage" | "sessionStorage" | "uploadthing" | "upstash" | "vercel-blob" | "vercelBlob" | "vercel-kv" | "vercelKV";
|
||||
type BuiltinDriverOptions = {
|
||||
"azure-app-configuration": AzureAppConfigurationOptions;
|
||||
"azureAppConfiguration": AzureAppConfigurationOptions;
|
||||
"azure-cosmos": AzureCosmosOptions;
|
||||
"azureCosmos": AzureCosmosOptions;
|
||||
"azure-key-vault": AzureKeyVaultOptions;
|
||||
"azureKeyVault": AzureKeyVaultOptions;
|
||||
"azure-storage-blob": AzureStorageBlobOptions;
|
||||
"azureStorageBlob": AzureStorageBlobOptions;
|
||||
"azure-storage-table": AzureStorageTableOptions;
|
||||
"azureStorageTable": AzureStorageTableOptions;
|
||||
"capacitor-preferences": CapacitorPreferencesOptions;
|
||||
"capacitorPreferences": CapacitorPreferencesOptions;
|
||||
"cloudflare-kv-binding": KVOptions;
|
||||
"cloudflareKVBinding": KVOptions;
|
||||
"cloudflare-kv-http": KVHTTPOptions;
|
||||
"cloudflareKVHttp": KVHTTPOptions;
|
||||
"cloudflare-r2-binding": CloudflareR2Options;
|
||||
"cloudflareR2Binding": CloudflareR2Options;
|
||||
"db0": DB0DriverOptions;
|
||||
"deno-kv-node": DenoKvNodeOptions;
|
||||
"denoKVNode": DenoKvNodeOptions;
|
||||
"deno-kv": DenoKvOptions;
|
||||
"denoKV": DenoKvOptions;
|
||||
"fs-lite": FSStorageOptions;
|
||||
"fsLite": FSStorageOptions;
|
||||
"fs": FSStorageOptions$1;
|
||||
"github": GithubOptions;
|
||||
"http": HTTPOptions;
|
||||
"indexedb": IDBKeyvalOptions;
|
||||
"localstorage": LocalStorageOptions;
|
||||
"lru-cache": LRUDriverOptions;
|
||||
"lruCache": LRUDriverOptions;
|
||||
"mongodb": MongoDbOptions;
|
||||
"netlify-blobs": NetlifyStoreOptions;
|
||||
"netlifyBlobs": NetlifyStoreOptions;
|
||||
"overlay": OverlayStorageOptions;
|
||||
"planetscale": PlanetscaleDriverOptions;
|
||||
"redis": RedisOptions;
|
||||
"s3": S3DriverOptions;
|
||||
"session-storage": SessionStorageOptions;
|
||||
"sessionStorage": SessionStorageOptions;
|
||||
"uploadthing": UploadThingOptions;
|
||||
"upstash": UpstashOptions;
|
||||
"vercel-blob": VercelBlobOptions;
|
||||
"vercelBlob": VercelBlobOptions;
|
||||
"vercel-kv": VercelKVOptions;
|
||||
"vercelKV": VercelKVOptions;
|
||||
};
|
||||
declare const builtinDrivers: {
|
||||
readonly "azure-app-configuration": "unstorage/drivers/azure-app-configuration";
|
||||
readonly azureAppConfiguration: "unstorage/drivers/azure-app-configuration";
|
||||
readonly "azure-cosmos": "unstorage/drivers/azure-cosmos";
|
||||
readonly azureCosmos: "unstorage/drivers/azure-cosmos";
|
||||
readonly "azure-key-vault": "unstorage/drivers/azure-key-vault";
|
||||
readonly azureKeyVault: "unstorage/drivers/azure-key-vault";
|
||||
readonly "azure-storage-blob": "unstorage/drivers/azure-storage-blob";
|
||||
readonly azureStorageBlob: "unstorage/drivers/azure-storage-blob";
|
||||
readonly "azure-storage-table": "unstorage/drivers/azure-storage-table";
|
||||
readonly azureStorageTable: "unstorage/drivers/azure-storage-table";
|
||||
readonly "capacitor-preferences": "unstorage/drivers/capacitor-preferences";
|
||||
readonly capacitorPreferences: "unstorage/drivers/capacitor-preferences";
|
||||
readonly "cloudflare-kv-binding": "unstorage/drivers/cloudflare-kv-binding";
|
||||
readonly cloudflareKVBinding: "unstorage/drivers/cloudflare-kv-binding";
|
||||
readonly "cloudflare-kv-http": "unstorage/drivers/cloudflare-kv-http";
|
||||
readonly cloudflareKVHttp: "unstorage/drivers/cloudflare-kv-http";
|
||||
readonly "cloudflare-r2-binding": "unstorage/drivers/cloudflare-r2-binding";
|
||||
readonly cloudflareR2Binding: "unstorage/drivers/cloudflare-r2-binding";
|
||||
readonly db0: "unstorage/drivers/db0";
|
||||
readonly "deno-kv-node": "unstorage/drivers/deno-kv-node";
|
||||
readonly denoKVNode: "unstorage/drivers/deno-kv-node";
|
||||
readonly "deno-kv": "unstorage/drivers/deno-kv";
|
||||
readonly denoKV: "unstorage/drivers/deno-kv";
|
||||
readonly "fs-lite": "unstorage/drivers/fs-lite";
|
||||
readonly fsLite: "unstorage/drivers/fs-lite";
|
||||
readonly fs: "unstorage/drivers/fs";
|
||||
readonly github: "unstorage/drivers/github";
|
||||
readonly http: "unstorage/drivers/http";
|
||||
readonly indexedb: "unstorage/drivers/indexedb";
|
||||
readonly localstorage: "unstorage/drivers/localstorage";
|
||||
readonly "lru-cache": "unstorage/drivers/lru-cache";
|
||||
readonly lruCache: "unstorage/drivers/lru-cache";
|
||||
readonly memory: "unstorage/drivers/memory";
|
||||
readonly mongodb: "unstorage/drivers/mongodb";
|
||||
readonly "netlify-blobs": "unstorage/drivers/netlify-blobs";
|
||||
readonly netlifyBlobs: "unstorage/drivers/netlify-blobs";
|
||||
readonly null: "unstorage/drivers/null";
|
||||
readonly overlay: "unstorage/drivers/overlay";
|
||||
readonly planetscale: "unstorage/drivers/planetscale";
|
||||
readonly redis: "unstorage/drivers/redis";
|
||||
readonly s3: "unstorage/drivers/s3";
|
||||
readonly "session-storage": "unstorage/drivers/session-storage";
|
||||
readonly sessionStorage: "unstorage/drivers/session-storage";
|
||||
readonly uploadthing: "unstorage/drivers/uploadthing";
|
||||
readonly upstash: "unstorage/drivers/upstash";
|
||||
readonly "vercel-blob": "unstorage/drivers/vercel-blob";
|
||||
readonly vercelBlob: "unstorage/drivers/vercel-blob";
|
||||
readonly "vercel-kv": "unstorage/drivers/vercel-kv";
|
||||
readonly vercelKV: "unstorage/drivers/vercel-kv";
|
||||
};
|
||||
|
||||
export { Driver, Storage, StorageValue, builtinDrivers, createStorage, defineDriver, filterKeyByBase, filterKeyByDepth, joinKeys, normalizeBaseKey, normalizeKey, prefixStorage, restoreSnapshot, snapshot };
|
||||
export type { BuiltinDriverName, BuiltinDriverOptions, CreateStorageOptions, Snapshot };
|
155
node_modules/unstorage/dist/index.d.mts
generated
vendored
Normal file
155
node_modules/unstorage/dist/index.d.mts
generated
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
import { D as Driver, S as StorageValue, a as Storage } from './shared/unstorage.Ca7R4QL2.mjs';
|
||||
export { d as DriverFlags, G as GetKeysOptions, c as StorageMeta, T as TransactionOptions, U as Unwatch, b as WatchCallback, W as WatchEvent } from './shared/unstorage.Ca7R4QL2.mjs';
|
||||
import { AzureAppConfigurationOptions } from 'unstorage/drivers/azure-app-configuration';
|
||||
import { AzureCosmosOptions } from 'unstorage/drivers/azure-cosmos';
|
||||
import { AzureKeyVaultOptions } from 'unstorage/drivers/azure-key-vault';
|
||||
import { AzureStorageBlobOptions } from 'unstorage/drivers/azure-storage-blob';
|
||||
import { AzureStorageTableOptions } from 'unstorage/drivers/azure-storage-table';
|
||||
import { CapacitorPreferencesOptions } from 'unstorage/drivers/capacitor-preferences';
|
||||
import { KVOptions } from 'unstorage/drivers/cloudflare-kv-binding';
|
||||
import { KVHTTPOptions } from 'unstorage/drivers/cloudflare-kv-http';
|
||||
import { CloudflareR2Options } from 'unstorage/drivers/cloudflare-r2-binding';
|
||||
import { DB0DriverOptions } from 'unstorage/drivers/db0';
|
||||
import { DenoKvNodeOptions } from 'unstorage/drivers/deno-kv-node';
|
||||
import { DenoKvOptions } from 'unstorage/drivers/deno-kv';
|
||||
import { FSStorageOptions } from 'unstorage/drivers/fs-lite';
|
||||
import { FSStorageOptions as FSStorageOptions$1 } from 'unstorage/drivers/fs';
|
||||
import { GithubOptions } from 'unstorage/drivers/github';
|
||||
import { HTTPOptions } from 'unstorage/drivers/http';
|
||||
import { IDBKeyvalOptions } from 'unstorage/drivers/indexedb';
|
||||
import { LocalStorageOptions } from 'unstorage/drivers/localstorage';
|
||||
import { LRUDriverOptions } from 'unstorage/drivers/lru-cache';
|
||||
import { MongoDbOptions } from 'unstorage/drivers/mongodb';
|
||||
import { NetlifyStoreOptions } from 'unstorage/drivers/netlify-blobs';
|
||||
import { OverlayStorageOptions } from 'unstorage/drivers/overlay';
|
||||
import { PlanetscaleDriverOptions } from 'unstorage/drivers/planetscale';
|
||||
import { RedisOptions } from 'unstorage/drivers/redis';
|
||||
import { S3DriverOptions } from 'unstorage/drivers/s3';
|
||||
import { SessionStorageOptions } from 'unstorage/drivers/session-storage';
|
||||
import { UploadThingOptions } from 'unstorage/drivers/uploadthing';
|
||||
import { UpstashOptions } from 'unstorage/drivers/upstash';
|
||||
import { VercelBlobOptions } from 'unstorage/drivers/vercel-blob';
|
||||
import { VercelKVOptions } from 'unstorage/drivers/vercel-kv';
|
||||
|
||||
interface CreateStorageOptions {
|
||||
driver?: Driver;
|
||||
}
|
||||
declare function createStorage<T extends StorageValue>(options?: CreateStorageOptions): Storage<T>;
|
||||
type Snapshot<T = string> = Record<string, T>;
|
||||
declare function snapshot(storage: Storage, base: string): Promise<Snapshot<string>>;
|
||||
declare function restoreSnapshot(driver: Storage, snapshot: Snapshot<StorageValue>, base?: string): Promise<void>;
|
||||
|
||||
declare function prefixStorage<T extends StorageValue>(storage: Storage<T> | Storage<any>, base: string): Storage<T>;
|
||||
declare function normalizeKey(key?: string): string;
|
||||
declare function joinKeys(...keys: string[]): string;
|
||||
declare function normalizeBaseKey(base?: string): string;
|
||||
declare function filterKeyByDepth(key: string, depth: number | undefined): boolean;
|
||||
declare function filterKeyByBase(key: string, base: string | undefined): boolean;
|
||||
|
||||
type DriverFactory<OptionsT, InstanceT> = (opts: OptionsT) => Driver<OptionsT, InstanceT>;
|
||||
declare function defineDriver<OptionsT = any, InstanceT = never>(factory: DriverFactory<OptionsT, InstanceT>): DriverFactory<OptionsT, InstanceT>;
|
||||
|
||||
type BuiltinDriverName = "azure-app-configuration" | "azureAppConfiguration" | "azure-cosmos" | "azureCosmos" | "azure-key-vault" | "azureKeyVault" | "azure-storage-blob" | "azureStorageBlob" | "azure-storage-table" | "azureStorageTable" | "capacitor-preferences" | "capacitorPreferences" | "cloudflare-kv-binding" | "cloudflareKVBinding" | "cloudflare-kv-http" | "cloudflareKVHttp" | "cloudflare-r2-binding" | "cloudflareR2Binding" | "db0" | "deno-kv-node" | "denoKVNode" | "deno-kv" | "denoKV" | "fs-lite" | "fsLite" | "fs" | "github" | "http" | "indexedb" | "localstorage" | "lru-cache" | "lruCache" | "memory" | "mongodb" | "netlify-blobs" | "netlifyBlobs" | "null" | "overlay" | "planetscale" | "redis" | "s3" | "session-storage" | "sessionStorage" | "uploadthing" | "upstash" | "vercel-blob" | "vercelBlob" | "vercel-kv" | "vercelKV";
|
||||
type BuiltinDriverOptions = {
|
||||
"azure-app-configuration": AzureAppConfigurationOptions;
|
||||
"azureAppConfiguration": AzureAppConfigurationOptions;
|
||||
"azure-cosmos": AzureCosmosOptions;
|
||||
"azureCosmos": AzureCosmosOptions;
|
||||
"azure-key-vault": AzureKeyVaultOptions;
|
||||
"azureKeyVault": AzureKeyVaultOptions;
|
||||
"azure-storage-blob": AzureStorageBlobOptions;
|
||||
"azureStorageBlob": AzureStorageBlobOptions;
|
||||
"azure-storage-table": AzureStorageTableOptions;
|
||||
"azureStorageTable": AzureStorageTableOptions;
|
||||
"capacitor-preferences": CapacitorPreferencesOptions;
|
||||
"capacitorPreferences": CapacitorPreferencesOptions;
|
||||
"cloudflare-kv-binding": KVOptions;
|
||||
"cloudflareKVBinding": KVOptions;
|
||||
"cloudflare-kv-http": KVHTTPOptions;
|
||||
"cloudflareKVHttp": KVHTTPOptions;
|
||||
"cloudflare-r2-binding": CloudflareR2Options;
|
||||
"cloudflareR2Binding": CloudflareR2Options;
|
||||
"db0": DB0DriverOptions;
|
||||
"deno-kv-node": DenoKvNodeOptions;
|
||||
"denoKVNode": DenoKvNodeOptions;
|
||||
"deno-kv": DenoKvOptions;
|
||||
"denoKV": DenoKvOptions;
|
||||
"fs-lite": FSStorageOptions;
|
||||
"fsLite": FSStorageOptions;
|
||||
"fs": FSStorageOptions$1;
|
||||
"github": GithubOptions;
|
||||
"http": HTTPOptions;
|
||||
"indexedb": IDBKeyvalOptions;
|
||||
"localstorage": LocalStorageOptions;
|
||||
"lru-cache": LRUDriverOptions;
|
||||
"lruCache": LRUDriverOptions;
|
||||
"mongodb": MongoDbOptions;
|
||||
"netlify-blobs": NetlifyStoreOptions;
|
||||
"netlifyBlobs": NetlifyStoreOptions;
|
||||
"overlay": OverlayStorageOptions;
|
||||
"planetscale": PlanetscaleDriverOptions;
|
||||
"redis": RedisOptions;
|
||||
"s3": S3DriverOptions;
|
||||
"session-storage": SessionStorageOptions;
|
||||
"sessionStorage": SessionStorageOptions;
|
||||
"uploadthing": UploadThingOptions;
|
||||
"upstash": UpstashOptions;
|
||||
"vercel-blob": VercelBlobOptions;
|
||||
"vercelBlob": VercelBlobOptions;
|
||||
"vercel-kv": VercelKVOptions;
|
||||
"vercelKV": VercelKVOptions;
|
||||
};
|
||||
declare const builtinDrivers: {
|
||||
readonly "azure-app-configuration": "unstorage/drivers/azure-app-configuration";
|
||||
readonly azureAppConfiguration: "unstorage/drivers/azure-app-configuration";
|
||||
readonly "azure-cosmos": "unstorage/drivers/azure-cosmos";
|
||||
readonly azureCosmos: "unstorage/drivers/azure-cosmos";
|
||||
readonly "azure-key-vault": "unstorage/drivers/azure-key-vault";
|
||||
readonly azureKeyVault: "unstorage/drivers/azure-key-vault";
|
||||
readonly "azure-storage-blob": "unstorage/drivers/azure-storage-blob";
|
||||
readonly azureStorageBlob: "unstorage/drivers/azure-storage-blob";
|
||||
readonly "azure-storage-table": "unstorage/drivers/azure-storage-table";
|
||||
readonly azureStorageTable: "unstorage/drivers/azure-storage-table";
|
||||
readonly "capacitor-preferences": "unstorage/drivers/capacitor-preferences";
|
||||
readonly capacitorPreferences: "unstorage/drivers/capacitor-preferences";
|
||||
readonly "cloudflare-kv-binding": "unstorage/drivers/cloudflare-kv-binding";
|
||||
readonly cloudflareKVBinding: "unstorage/drivers/cloudflare-kv-binding";
|
||||
readonly "cloudflare-kv-http": "unstorage/drivers/cloudflare-kv-http";
|
||||
readonly cloudflareKVHttp: "unstorage/drivers/cloudflare-kv-http";
|
||||
readonly "cloudflare-r2-binding": "unstorage/drivers/cloudflare-r2-binding";
|
||||
readonly cloudflareR2Binding: "unstorage/drivers/cloudflare-r2-binding";
|
||||
readonly db0: "unstorage/drivers/db0";
|
||||
readonly "deno-kv-node": "unstorage/drivers/deno-kv-node";
|
||||
readonly denoKVNode: "unstorage/drivers/deno-kv-node";
|
||||
readonly "deno-kv": "unstorage/drivers/deno-kv";
|
||||
readonly denoKV: "unstorage/drivers/deno-kv";
|
||||
readonly "fs-lite": "unstorage/drivers/fs-lite";
|
||||
readonly fsLite: "unstorage/drivers/fs-lite";
|
||||
readonly fs: "unstorage/drivers/fs";
|
||||
readonly github: "unstorage/drivers/github";
|
||||
readonly http: "unstorage/drivers/http";
|
||||
readonly indexedb: "unstorage/drivers/indexedb";
|
||||
readonly localstorage: "unstorage/drivers/localstorage";
|
||||
readonly "lru-cache": "unstorage/drivers/lru-cache";
|
||||
readonly lruCache: "unstorage/drivers/lru-cache";
|
||||
readonly memory: "unstorage/drivers/memory";
|
||||
readonly mongodb: "unstorage/drivers/mongodb";
|
||||
readonly "netlify-blobs": "unstorage/drivers/netlify-blobs";
|
||||
readonly netlifyBlobs: "unstorage/drivers/netlify-blobs";
|
||||
readonly null: "unstorage/drivers/null";
|
||||
readonly overlay: "unstorage/drivers/overlay";
|
||||
readonly planetscale: "unstorage/drivers/planetscale";
|
||||
readonly redis: "unstorage/drivers/redis";
|
||||
readonly s3: "unstorage/drivers/s3";
|
||||
readonly "session-storage": "unstorage/drivers/session-storage";
|
||||
readonly sessionStorage: "unstorage/drivers/session-storage";
|
||||
readonly uploadthing: "unstorage/drivers/uploadthing";
|
||||
readonly upstash: "unstorage/drivers/upstash";
|
||||
readonly "vercel-blob": "unstorage/drivers/vercel-blob";
|
||||
readonly vercelBlob: "unstorage/drivers/vercel-blob";
|
||||
readonly "vercel-kv": "unstorage/drivers/vercel-kv";
|
||||
readonly vercelKV: "unstorage/drivers/vercel-kv";
|
||||
};
|
||||
|
||||
export { Driver, Storage, StorageValue, builtinDrivers, createStorage, defineDriver, filterKeyByBase, filterKeyByDepth, joinKeys, normalizeBaseKey, normalizeKey, prefixStorage, restoreSnapshot, snapshot };
|
||||
export type { BuiltinDriverName, BuiltinDriverOptions, CreateStorageOptions, Snapshot };
|
155
node_modules/unstorage/dist/index.d.ts
generated
vendored
Normal file
155
node_modules/unstorage/dist/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
import { D as Driver, S as StorageValue, a as Storage } from './shared/unstorage.Ca7R4QL2.js';
|
||||
export { d as DriverFlags, G as GetKeysOptions, c as StorageMeta, T as TransactionOptions, U as Unwatch, b as WatchCallback, W as WatchEvent } from './shared/unstorage.Ca7R4QL2.js';
|
||||
import { AzureAppConfigurationOptions } from 'unstorage/drivers/azure-app-configuration';
|
||||
import { AzureCosmosOptions } from 'unstorage/drivers/azure-cosmos';
|
||||
import { AzureKeyVaultOptions } from 'unstorage/drivers/azure-key-vault';
|
||||
import { AzureStorageBlobOptions } from 'unstorage/drivers/azure-storage-blob';
|
||||
import { AzureStorageTableOptions } from 'unstorage/drivers/azure-storage-table';
|
||||
import { CapacitorPreferencesOptions } from 'unstorage/drivers/capacitor-preferences';
|
||||
import { KVOptions } from 'unstorage/drivers/cloudflare-kv-binding';
|
||||
import { KVHTTPOptions } from 'unstorage/drivers/cloudflare-kv-http';
|
||||
import { CloudflareR2Options } from 'unstorage/drivers/cloudflare-r2-binding';
|
||||
import { DB0DriverOptions } from 'unstorage/drivers/db0';
|
||||
import { DenoKvNodeOptions } from 'unstorage/drivers/deno-kv-node';
|
||||
import { DenoKvOptions } from 'unstorage/drivers/deno-kv';
|
||||
import { FSStorageOptions } from 'unstorage/drivers/fs-lite';
|
||||
import { FSStorageOptions as FSStorageOptions$1 } from 'unstorage/drivers/fs';
|
||||
import { GithubOptions } from 'unstorage/drivers/github';
|
||||
import { HTTPOptions } from 'unstorage/drivers/http';
|
||||
import { IDBKeyvalOptions } from 'unstorage/drivers/indexedb';
|
||||
import { LocalStorageOptions } from 'unstorage/drivers/localstorage';
|
||||
import { LRUDriverOptions } from 'unstorage/drivers/lru-cache';
|
||||
import { MongoDbOptions } from 'unstorage/drivers/mongodb';
|
||||
import { NetlifyStoreOptions } from 'unstorage/drivers/netlify-blobs';
|
||||
import { OverlayStorageOptions } from 'unstorage/drivers/overlay';
|
||||
import { PlanetscaleDriverOptions } from 'unstorage/drivers/planetscale';
|
||||
import { RedisOptions } from 'unstorage/drivers/redis';
|
||||
import { S3DriverOptions } from 'unstorage/drivers/s3';
|
||||
import { SessionStorageOptions } from 'unstorage/drivers/session-storage';
|
||||
import { UploadThingOptions } from 'unstorage/drivers/uploadthing';
|
||||
import { UpstashOptions } from 'unstorage/drivers/upstash';
|
||||
import { VercelBlobOptions } from 'unstorage/drivers/vercel-blob';
|
||||
import { VercelKVOptions } from 'unstorage/drivers/vercel-kv';
|
||||
|
||||
interface CreateStorageOptions {
|
||||
driver?: Driver;
|
||||
}
|
||||
declare function createStorage<T extends StorageValue>(options?: CreateStorageOptions): Storage<T>;
|
||||
type Snapshot<T = string> = Record<string, T>;
|
||||
declare function snapshot(storage: Storage, base: string): Promise<Snapshot<string>>;
|
||||
declare function restoreSnapshot(driver: Storage, snapshot: Snapshot<StorageValue>, base?: string): Promise<void>;
|
||||
|
||||
declare function prefixStorage<T extends StorageValue>(storage: Storage<T> | Storage<any>, base: string): Storage<T>;
|
||||
declare function normalizeKey(key?: string): string;
|
||||
declare function joinKeys(...keys: string[]): string;
|
||||
declare function normalizeBaseKey(base?: string): string;
|
||||
declare function filterKeyByDepth(key: string, depth: number | undefined): boolean;
|
||||
declare function filterKeyByBase(key: string, base: string | undefined): boolean;
|
||||
|
||||
type DriverFactory<OptionsT, InstanceT> = (opts: OptionsT) => Driver<OptionsT, InstanceT>;
|
||||
declare function defineDriver<OptionsT = any, InstanceT = never>(factory: DriverFactory<OptionsT, InstanceT>): DriverFactory<OptionsT, InstanceT>;
|
||||
|
||||
type BuiltinDriverName = "azure-app-configuration" | "azureAppConfiguration" | "azure-cosmos" | "azureCosmos" | "azure-key-vault" | "azureKeyVault" | "azure-storage-blob" | "azureStorageBlob" | "azure-storage-table" | "azureStorageTable" | "capacitor-preferences" | "capacitorPreferences" | "cloudflare-kv-binding" | "cloudflareKVBinding" | "cloudflare-kv-http" | "cloudflareKVHttp" | "cloudflare-r2-binding" | "cloudflareR2Binding" | "db0" | "deno-kv-node" | "denoKVNode" | "deno-kv" | "denoKV" | "fs-lite" | "fsLite" | "fs" | "github" | "http" | "indexedb" | "localstorage" | "lru-cache" | "lruCache" | "memory" | "mongodb" | "netlify-blobs" | "netlifyBlobs" | "null" | "overlay" | "planetscale" | "redis" | "s3" | "session-storage" | "sessionStorage" | "uploadthing" | "upstash" | "vercel-blob" | "vercelBlob" | "vercel-kv" | "vercelKV";
|
||||
type BuiltinDriverOptions = {
|
||||
"azure-app-configuration": AzureAppConfigurationOptions;
|
||||
"azureAppConfiguration": AzureAppConfigurationOptions;
|
||||
"azure-cosmos": AzureCosmosOptions;
|
||||
"azureCosmos": AzureCosmosOptions;
|
||||
"azure-key-vault": AzureKeyVaultOptions;
|
||||
"azureKeyVault": AzureKeyVaultOptions;
|
||||
"azure-storage-blob": AzureStorageBlobOptions;
|
||||
"azureStorageBlob": AzureStorageBlobOptions;
|
||||
"azure-storage-table": AzureStorageTableOptions;
|
||||
"azureStorageTable": AzureStorageTableOptions;
|
||||
"capacitor-preferences": CapacitorPreferencesOptions;
|
||||
"capacitorPreferences": CapacitorPreferencesOptions;
|
||||
"cloudflare-kv-binding": KVOptions;
|
||||
"cloudflareKVBinding": KVOptions;
|
||||
"cloudflare-kv-http": KVHTTPOptions;
|
||||
"cloudflareKVHttp": KVHTTPOptions;
|
||||
"cloudflare-r2-binding": CloudflareR2Options;
|
||||
"cloudflareR2Binding": CloudflareR2Options;
|
||||
"db0": DB0DriverOptions;
|
||||
"deno-kv-node": DenoKvNodeOptions;
|
||||
"denoKVNode": DenoKvNodeOptions;
|
||||
"deno-kv": DenoKvOptions;
|
||||
"denoKV": DenoKvOptions;
|
||||
"fs-lite": FSStorageOptions;
|
||||
"fsLite": FSStorageOptions;
|
||||
"fs": FSStorageOptions$1;
|
||||
"github": GithubOptions;
|
||||
"http": HTTPOptions;
|
||||
"indexedb": IDBKeyvalOptions;
|
||||
"localstorage": LocalStorageOptions;
|
||||
"lru-cache": LRUDriverOptions;
|
||||
"lruCache": LRUDriverOptions;
|
||||
"mongodb": MongoDbOptions;
|
||||
"netlify-blobs": NetlifyStoreOptions;
|
||||
"netlifyBlobs": NetlifyStoreOptions;
|
||||
"overlay": OverlayStorageOptions;
|
||||
"planetscale": PlanetscaleDriverOptions;
|
||||
"redis": RedisOptions;
|
||||
"s3": S3DriverOptions;
|
||||
"session-storage": SessionStorageOptions;
|
||||
"sessionStorage": SessionStorageOptions;
|
||||
"uploadthing": UploadThingOptions;
|
||||
"upstash": UpstashOptions;
|
||||
"vercel-blob": VercelBlobOptions;
|
||||
"vercelBlob": VercelBlobOptions;
|
||||
"vercel-kv": VercelKVOptions;
|
||||
"vercelKV": VercelKVOptions;
|
||||
};
|
||||
declare const builtinDrivers: {
|
||||
readonly "azure-app-configuration": "unstorage/drivers/azure-app-configuration";
|
||||
readonly azureAppConfiguration: "unstorage/drivers/azure-app-configuration";
|
||||
readonly "azure-cosmos": "unstorage/drivers/azure-cosmos";
|
||||
readonly azureCosmos: "unstorage/drivers/azure-cosmos";
|
||||
readonly "azure-key-vault": "unstorage/drivers/azure-key-vault";
|
||||
readonly azureKeyVault: "unstorage/drivers/azure-key-vault";
|
||||
readonly "azure-storage-blob": "unstorage/drivers/azure-storage-blob";
|
||||
readonly azureStorageBlob: "unstorage/drivers/azure-storage-blob";
|
||||
readonly "azure-storage-table": "unstorage/drivers/azure-storage-table";
|
||||
readonly azureStorageTable: "unstorage/drivers/azure-storage-table";
|
||||
readonly "capacitor-preferences": "unstorage/drivers/capacitor-preferences";
|
||||
readonly capacitorPreferences: "unstorage/drivers/capacitor-preferences";
|
||||
readonly "cloudflare-kv-binding": "unstorage/drivers/cloudflare-kv-binding";
|
||||
readonly cloudflareKVBinding: "unstorage/drivers/cloudflare-kv-binding";
|
||||
readonly "cloudflare-kv-http": "unstorage/drivers/cloudflare-kv-http";
|
||||
readonly cloudflareKVHttp: "unstorage/drivers/cloudflare-kv-http";
|
||||
readonly "cloudflare-r2-binding": "unstorage/drivers/cloudflare-r2-binding";
|
||||
readonly cloudflareR2Binding: "unstorage/drivers/cloudflare-r2-binding";
|
||||
readonly db0: "unstorage/drivers/db0";
|
||||
readonly "deno-kv-node": "unstorage/drivers/deno-kv-node";
|
||||
readonly denoKVNode: "unstorage/drivers/deno-kv-node";
|
||||
readonly "deno-kv": "unstorage/drivers/deno-kv";
|
||||
readonly denoKV: "unstorage/drivers/deno-kv";
|
||||
readonly "fs-lite": "unstorage/drivers/fs-lite";
|
||||
readonly fsLite: "unstorage/drivers/fs-lite";
|
||||
readonly fs: "unstorage/drivers/fs";
|
||||
readonly github: "unstorage/drivers/github";
|
||||
readonly http: "unstorage/drivers/http";
|
||||
readonly indexedb: "unstorage/drivers/indexedb";
|
||||
readonly localstorage: "unstorage/drivers/localstorage";
|
||||
readonly "lru-cache": "unstorage/drivers/lru-cache";
|
||||
readonly lruCache: "unstorage/drivers/lru-cache";
|
||||
readonly memory: "unstorage/drivers/memory";
|
||||
readonly mongodb: "unstorage/drivers/mongodb";
|
||||
readonly "netlify-blobs": "unstorage/drivers/netlify-blobs";
|
||||
readonly netlifyBlobs: "unstorage/drivers/netlify-blobs";
|
||||
readonly null: "unstorage/drivers/null";
|
||||
readonly overlay: "unstorage/drivers/overlay";
|
||||
readonly planetscale: "unstorage/drivers/planetscale";
|
||||
readonly redis: "unstorage/drivers/redis";
|
||||
readonly s3: "unstorage/drivers/s3";
|
||||
readonly "session-storage": "unstorage/drivers/session-storage";
|
||||
readonly sessionStorage: "unstorage/drivers/session-storage";
|
||||
readonly uploadthing: "unstorage/drivers/uploadthing";
|
||||
readonly upstash: "unstorage/drivers/upstash";
|
||||
readonly "vercel-blob": "unstorage/drivers/vercel-blob";
|
||||
readonly vercelBlob: "unstorage/drivers/vercel-blob";
|
||||
readonly "vercel-kv": "unstorage/drivers/vercel-kv";
|
||||
readonly vercelKV: "unstorage/drivers/vercel-kv";
|
||||
};
|
||||
|
||||
export { Driver, Storage, StorageValue, builtinDrivers, createStorage, defineDriver, filterKeyByBase, filterKeyByDepth, joinKeys, normalizeBaseKey, normalizeKey, prefixStorage, restoreSnapshot, snapshot };
|
||||
export type { BuiltinDriverName, BuiltinDriverOptions, CreateStorageOptions, Snapshot };
|
515
node_modules/unstorage/dist/index.mjs
generated
vendored
Normal file
515
node_modules/unstorage/dist/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,515 @@
|
||||
import destr from 'destr';
|
||||
import { n as normalizeBaseKey, a as normalizeKey, b as asyncCall, f as filterKeyByDepth, c as filterKeyByBase, s as serializeRaw, d as stringify, e as deserializeRaw, j as joinKeys } from './shared/unstorage.CoCt7NXC.mjs';
|
||||
export { p as prefixStorage } from './shared/unstorage.CoCt7NXC.mjs';
|
||||
|
||||
function defineDriver(factory) {
|
||||
return factory;
|
||||
}
|
||||
|
||||
const DRIVER_NAME = "memory";
|
||||
const memory = defineDriver(() => {
|
||||
const data = /* @__PURE__ */ new Map();
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
getInstance: () => data,
|
||||
hasItem(key) {
|
||||
return data.has(key);
|
||||
},
|
||||
getItem(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
data.delete(key);
|
||||
},
|
||||
getKeys() {
|
||||
return [...data.keys()];
|
||||
},
|
||||
clear() {
|
||||
data.clear();
|
||||
},
|
||||
dispose() {
|
||||
data.clear();
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
function createStorage(options = {}) {
|
||||
const context = {
|
||||
mounts: { "": options.driver || memory() },
|
||||
mountpoints: [""],
|
||||
watching: false,
|
||||
watchListeners: [],
|
||||
unwatch: {}
|
||||
};
|
||||
const getMount = (key) => {
|
||||
for (const base of context.mountpoints) {
|
||||
if (key.startsWith(base)) {
|
||||
return {
|
||||
base,
|
||||
relativeKey: key.slice(base.length),
|
||||
driver: context.mounts[base]
|
||||
};
|
||||
}
|
||||
}
|
||||
return {
|
||||
base: "",
|
||||
relativeKey: key,
|
||||
driver: context.mounts[""]
|
||||
};
|
||||
};
|
||||
const getMounts = (base, includeParent) => {
|
||||
return context.mountpoints.filter(
|
||||
(mountpoint) => mountpoint.startsWith(base) || includeParent && base.startsWith(mountpoint)
|
||||
).map((mountpoint) => ({
|
||||
relativeBase: base.length > mountpoint.length ? base.slice(mountpoint.length) : void 0,
|
||||
mountpoint,
|
||||
driver: context.mounts[mountpoint]
|
||||
}));
|
||||
};
|
||||
const onChange = (event, key) => {
|
||||
if (!context.watching) {
|
||||
return;
|
||||
}
|
||||
key = normalizeKey(key);
|
||||
for (const listener of context.watchListeners) {
|
||||
listener(event, key);
|
||||
}
|
||||
};
|
||||
const startWatch = async () => {
|
||||
if (context.watching) {
|
||||
return;
|
||||
}
|
||||
context.watching = true;
|
||||
for (const mountpoint in context.mounts) {
|
||||
context.unwatch[mountpoint] = await watch(
|
||||
context.mounts[mountpoint],
|
||||
onChange,
|
||||
mountpoint
|
||||
);
|
||||
}
|
||||
};
|
||||
const stopWatch = async () => {
|
||||
if (!context.watching) {
|
||||
return;
|
||||
}
|
||||
for (const mountpoint in context.unwatch) {
|
||||
await context.unwatch[mountpoint]();
|
||||
}
|
||||
context.unwatch = {};
|
||||
context.watching = false;
|
||||
};
|
||||
const runBatch = (items, commonOptions, cb) => {
|
||||
const batches = /* @__PURE__ */ new Map();
|
||||
const getBatch = (mount) => {
|
||||
let batch = batches.get(mount.base);
|
||||
if (!batch) {
|
||||
batch = {
|
||||
driver: mount.driver,
|
||||
base: mount.base,
|
||||
items: []
|
||||
};
|
||||
batches.set(mount.base, batch);
|
||||
}
|
||||
return batch;
|
||||
};
|
||||
for (const item of items) {
|
||||
const isStringItem = typeof item === "string";
|
||||
const key = normalizeKey(isStringItem ? item : item.key);
|
||||
const value = isStringItem ? void 0 : item.value;
|
||||
const options2 = isStringItem || !item.options ? commonOptions : { ...commonOptions, ...item.options };
|
||||
const mount = getMount(key);
|
||||
getBatch(mount).items.push({
|
||||
key,
|
||||
value,
|
||||
relativeKey: mount.relativeKey,
|
||||
options: options2
|
||||
});
|
||||
}
|
||||
return Promise.all([...batches.values()].map((batch) => cb(batch))).then(
|
||||
(r) => r.flat()
|
||||
);
|
||||
};
|
||||
const storage = {
|
||||
// Item
|
||||
hasItem(key, opts = {}) {
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
return asyncCall(driver.hasItem, relativeKey, opts);
|
||||
},
|
||||
getItem(key, opts = {}) {
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
return asyncCall(driver.getItem, relativeKey, opts).then(
|
||||
(value) => destr(value)
|
||||
);
|
||||
},
|
||||
getItems(items, commonOptions = {}) {
|
||||
return runBatch(items, commonOptions, (batch) => {
|
||||
if (batch.driver.getItems) {
|
||||
return asyncCall(
|
||||
batch.driver.getItems,
|
||||
batch.items.map((item) => ({
|
||||
key: item.relativeKey,
|
||||
options: item.options
|
||||
})),
|
||||
commonOptions
|
||||
).then(
|
||||
(r) => r.map((item) => ({
|
||||
key: joinKeys(batch.base, item.key),
|
||||
value: destr(item.value)
|
||||
}))
|
||||
);
|
||||
}
|
||||
return Promise.all(
|
||||
batch.items.map((item) => {
|
||||
return asyncCall(
|
||||
batch.driver.getItem,
|
||||
item.relativeKey,
|
||||
item.options
|
||||
).then((value) => ({
|
||||
key: item.key,
|
||||
value: destr(value)
|
||||
}));
|
||||
})
|
||||
);
|
||||
});
|
||||
},
|
||||
getItemRaw(key, opts = {}) {
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (driver.getItemRaw) {
|
||||
return asyncCall(driver.getItemRaw, relativeKey, opts);
|
||||
}
|
||||
return asyncCall(driver.getItem, relativeKey, opts).then(
|
||||
(value) => deserializeRaw(value)
|
||||
);
|
||||
},
|
||||
async setItem(key, value, opts = {}) {
|
||||
if (value === void 0) {
|
||||
return storage.removeItem(key);
|
||||
}
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (!driver.setItem) {
|
||||
return;
|
||||
}
|
||||
await asyncCall(driver.setItem, relativeKey, stringify(value), opts);
|
||||
if (!driver.watch) {
|
||||
onChange("update", key);
|
||||
}
|
||||
},
|
||||
async setItems(items, commonOptions) {
|
||||
await runBatch(items, commonOptions, async (batch) => {
|
||||
if (batch.driver.setItems) {
|
||||
return asyncCall(
|
||||
batch.driver.setItems,
|
||||
batch.items.map((item) => ({
|
||||
key: item.relativeKey,
|
||||
value: stringify(item.value),
|
||||
options: item.options
|
||||
})),
|
||||
commonOptions
|
||||
);
|
||||
}
|
||||
if (!batch.driver.setItem) {
|
||||
return;
|
||||
}
|
||||
await Promise.all(
|
||||
batch.items.map((item) => {
|
||||
return asyncCall(
|
||||
batch.driver.setItem,
|
||||
item.relativeKey,
|
||||
stringify(item.value),
|
||||
item.options
|
||||
);
|
||||
})
|
||||
);
|
||||
});
|
||||
},
|
||||
async setItemRaw(key, value, opts = {}) {
|
||||
if (value === void 0) {
|
||||
return storage.removeItem(key, opts);
|
||||
}
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (driver.setItemRaw) {
|
||||
await asyncCall(driver.setItemRaw, relativeKey, value, opts);
|
||||
} else if (driver.setItem) {
|
||||
await asyncCall(driver.setItem, relativeKey, serializeRaw(value), opts);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
if (!driver.watch) {
|
||||
onChange("update", key);
|
||||
}
|
||||
},
|
||||
async removeItem(key, opts = {}) {
|
||||
if (typeof opts === "boolean") {
|
||||
opts = { removeMeta: opts };
|
||||
}
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
if (!driver.removeItem) {
|
||||
return;
|
||||
}
|
||||
await asyncCall(driver.removeItem, relativeKey, opts);
|
||||
if (opts.removeMeta || opts.removeMata) {
|
||||
await asyncCall(driver.removeItem, relativeKey + "$", opts);
|
||||
}
|
||||
if (!driver.watch) {
|
||||
onChange("remove", key);
|
||||
}
|
||||
},
|
||||
// Meta
|
||||
async getMeta(key, opts = {}) {
|
||||
if (typeof opts === "boolean") {
|
||||
opts = { nativeOnly: opts };
|
||||
}
|
||||
key = normalizeKey(key);
|
||||
const { relativeKey, driver } = getMount(key);
|
||||
const meta = /* @__PURE__ */ Object.create(null);
|
||||
if (driver.getMeta) {
|
||||
Object.assign(meta, await asyncCall(driver.getMeta, relativeKey, opts));
|
||||
}
|
||||
if (!opts.nativeOnly) {
|
||||
const value = await asyncCall(
|
||||
driver.getItem,
|
||||
relativeKey + "$",
|
||||
opts
|
||||
).then((value_) => destr(value_));
|
||||
if (value && typeof value === "object") {
|
||||
if (typeof value.atime === "string") {
|
||||
value.atime = new Date(value.atime);
|
||||
}
|
||||
if (typeof value.mtime === "string") {
|
||||
value.mtime = new Date(value.mtime);
|
||||
}
|
||||
Object.assign(meta, value);
|
||||
}
|
||||
}
|
||||
return meta;
|
||||
},
|
||||
setMeta(key, value, opts = {}) {
|
||||
return this.setItem(key + "$", value, opts);
|
||||
},
|
||||
removeMeta(key, opts = {}) {
|
||||
return this.removeItem(key + "$", opts);
|
||||
},
|
||||
// Keys
|
||||
async getKeys(base, opts = {}) {
|
||||
base = normalizeBaseKey(base);
|
||||
const mounts = getMounts(base, true);
|
||||
let maskedMounts = [];
|
||||
const allKeys = [];
|
||||
let allMountsSupportMaxDepth = true;
|
||||
for (const mount of mounts) {
|
||||
if (!mount.driver.flags?.maxDepth) {
|
||||
allMountsSupportMaxDepth = false;
|
||||
}
|
||||
const rawKeys = await asyncCall(
|
||||
mount.driver.getKeys,
|
||||
mount.relativeBase,
|
||||
opts
|
||||
);
|
||||
for (const key of rawKeys) {
|
||||
const fullKey = mount.mountpoint + normalizeKey(key);
|
||||
if (!maskedMounts.some((p) => fullKey.startsWith(p))) {
|
||||
allKeys.push(fullKey);
|
||||
}
|
||||
}
|
||||
maskedMounts = [
|
||||
mount.mountpoint,
|
||||
...maskedMounts.filter((p) => !p.startsWith(mount.mountpoint))
|
||||
];
|
||||
}
|
||||
const shouldFilterByDepth = opts.maxDepth !== void 0 && !allMountsSupportMaxDepth;
|
||||
return allKeys.filter(
|
||||
(key) => (!shouldFilterByDepth || filterKeyByDepth(key, opts.maxDepth)) && filterKeyByBase(key, base)
|
||||
);
|
||||
},
|
||||
// Utils
|
||||
async clear(base, opts = {}) {
|
||||
base = normalizeBaseKey(base);
|
||||
await Promise.all(
|
||||
getMounts(base, false).map(async (m) => {
|
||||
if (m.driver.clear) {
|
||||
return asyncCall(m.driver.clear, m.relativeBase, opts);
|
||||
}
|
||||
if (m.driver.removeItem) {
|
||||
const keys = await m.driver.getKeys(m.relativeBase || "", opts);
|
||||
return Promise.all(
|
||||
keys.map((key) => m.driver.removeItem(key, opts))
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
async dispose() {
|
||||
await Promise.all(
|
||||
Object.values(context.mounts).map((driver) => dispose(driver))
|
||||
);
|
||||
},
|
||||
async watch(callback) {
|
||||
await startWatch();
|
||||
context.watchListeners.push(callback);
|
||||
return async () => {
|
||||
context.watchListeners = context.watchListeners.filter(
|
||||
(listener) => listener !== callback
|
||||
);
|
||||
if (context.watchListeners.length === 0) {
|
||||
await stopWatch();
|
||||
}
|
||||
};
|
||||
},
|
||||
async unwatch() {
|
||||
context.watchListeners = [];
|
||||
await stopWatch();
|
||||
},
|
||||
// Mount
|
||||
mount(base, driver) {
|
||||
base = normalizeBaseKey(base);
|
||||
if (base && context.mounts[base]) {
|
||||
throw new Error(`already mounted at ${base}`);
|
||||
}
|
||||
if (base) {
|
||||
context.mountpoints.push(base);
|
||||
context.mountpoints.sort((a, b) => b.length - a.length);
|
||||
}
|
||||
context.mounts[base] = driver;
|
||||
if (context.watching) {
|
||||
Promise.resolve(watch(driver, onChange, base)).then((unwatcher) => {
|
||||
context.unwatch[base] = unwatcher;
|
||||
}).catch(console.error);
|
||||
}
|
||||
return storage;
|
||||
},
|
||||
async unmount(base, _dispose = true) {
|
||||
base = normalizeBaseKey(base);
|
||||
if (!base || !context.mounts[base]) {
|
||||
return;
|
||||
}
|
||||
if (context.watching && base in context.unwatch) {
|
||||
context.unwatch[base]?.();
|
||||
delete context.unwatch[base];
|
||||
}
|
||||
if (_dispose) {
|
||||
await dispose(context.mounts[base]);
|
||||
}
|
||||
context.mountpoints = context.mountpoints.filter((key) => key !== base);
|
||||
delete context.mounts[base];
|
||||
},
|
||||
getMount(key = "") {
|
||||
key = normalizeKey(key) + ":";
|
||||
const m = getMount(key);
|
||||
return {
|
||||
driver: m.driver,
|
||||
base: m.base
|
||||
};
|
||||
},
|
||||
getMounts(base = "", opts = {}) {
|
||||
base = normalizeKey(base);
|
||||
const mounts = getMounts(base, opts.parents);
|
||||
return mounts.map((m) => ({
|
||||
driver: m.driver,
|
||||
base: m.mountpoint
|
||||
}));
|
||||
},
|
||||
// Aliases
|
||||
keys: (base, opts = {}) => storage.getKeys(base, opts),
|
||||
get: (key, opts = {}) => storage.getItem(key, opts),
|
||||
set: (key, value, opts = {}) => storage.setItem(key, value, opts),
|
||||
has: (key, opts = {}) => storage.hasItem(key, opts),
|
||||
del: (key, opts = {}) => storage.removeItem(key, opts),
|
||||
remove: (key, opts = {}) => storage.removeItem(key, opts)
|
||||
};
|
||||
return storage;
|
||||
}
|
||||
async function snapshot(storage, base) {
|
||||
base = normalizeBaseKey(base);
|
||||
const keys = await storage.getKeys(base);
|
||||
const snapshot2 = {};
|
||||
await Promise.all(
|
||||
keys.map(async (key) => {
|
||||
snapshot2[key.slice(base.length)] = await storage.getItem(key);
|
||||
})
|
||||
);
|
||||
return snapshot2;
|
||||
}
|
||||
async function restoreSnapshot(driver, snapshot2, base = "") {
|
||||
base = normalizeBaseKey(base);
|
||||
await Promise.all(
|
||||
Object.entries(snapshot2).map((e) => driver.setItem(base + e[0], e[1]))
|
||||
);
|
||||
}
|
||||
function watch(driver, onChange, base) {
|
||||
return driver.watch ? driver.watch((event, key) => onChange(event, base + key)) : () => {
|
||||
};
|
||||
}
|
||||
async function dispose(driver) {
|
||||
if (typeof driver.dispose === "function") {
|
||||
await asyncCall(driver.dispose);
|
||||
}
|
||||
}
|
||||
|
||||
const builtinDrivers = {
|
||||
"azure-app-configuration": "unstorage/drivers/azure-app-configuration",
|
||||
"azureAppConfiguration": "unstorage/drivers/azure-app-configuration",
|
||||
"azure-cosmos": "unstorage/drivers/azure-cosmos",
|
||||
"azureCosmos": "unstorage/drivers/azure-cosmos",
|
||||
"azure-key-vault": "unstorage/drivers/azure-key-vault",
|
||||
"azureKeyVault": "unstorage/drivers/azure-key-vault",
|
||||
"azure-storage-blob": "unstorage/drivers/azure-storage-blob",
|
||||
"azureStorageBlob": "unstorage/drivers/azure-storage-blob",
|
||||
"azure-storage-table": "unstorage/drivers/azure-storage-table",
|
||||
"azureStorageTable": "unstorage/drivers/azure-storage-table",
|
||||
"capacitor-preferences": "unstorage/drivers/capacitor-preferences",
|
||||
"capacitorPreferences": "unstorage/drivers/capacitor-preferences",
|
||||
"cloudflare-kv-binding": "unstorage/drivers/cloudflare-kv-binding",
|
||||
"cloudflareKVBinding": "unstorage/drivers/cloudflare-kv-binding",
|
||||
"cloudflare-kv-http": "unstorage/drivers/cloudflare-kv-http",
|
||||
"cloudflareKVHttp": "unstorage/drivers/cloudflare-kv-http",
|
||||
"cloudflare-r2-binding": "unstorage/drivers/cloudflare-r2-binding",
|
||||
"cloudflareR2Binding": "unstorage/drivers/cloudflare-r2-binding",
|
||||
"db0": "unstorage/drivers/db0",
|
||||
"deno-kv-node": "unstorage/drivers/deno-kv-node",
|
||||
"denoKVNode": "unstorage/drivers/deno-kv-node",
|
||||
"deno-kv": "unstorage/drivers/deno-kv",
|
||||
"denoKV": "unstorage/drivers/deno-kv",
|
||||
"fs-lite": "unstorage/drivers/fs-lite",
|
||||
"fsLite": "unstorage/drivers/fs-lite",
|
||||
"fs": "unstorage/drivers/fs",
|
||||
"github": "unstorage/drivers/github",
|
||||
"http": "unstorage/drivers/http",
|
||||
"indexedb": "unstorage/drivers/indexedb",
|
||||
"localstorage": "unstorage/drivers/localstorage",
|
||||
"lru-cache": "unstorage/drivers/lru-cache",
|
||||
"lruCache": "unstorage/drivers/lru-cache",
|
||||
"memory": "unstorage/drivers/memory",
|
||||
"mongodb": "unstorage/drivers/mongodb",
|
||||
"netlify-blobs": "unstorage/drivers/netlify-blobs",
|
||||
"netlifyBlobs": "unstorage/drivers/netlify-blobs",
|
||||
"null": "unstorage/drivers/null",
|
||||
"overlay": "unstorage/drivers/overlay",
|
||||
"planetscale": "unstorage/drivers/planetscale",
|
||||
"redis": "unstorage/drivers/redis",
|
||||
"s3": "unstorage/drivers/s3",
|
||||
"session-storage": "unstorage/drivers/session-storage",
|
||||
"sessionStorage": "unstorage/drivers/session-storage",
|
||||
"uploadthing": "unstorage/drivers/uploadthing",
|
||||
"upstash": "unstorage/drivers/upstash",
|
||||
"vercel-blob": "unstorage/drivers/vercel-blob",
|
||||
"vercelBlob": "unstorage/drivers/vercel-blob",
|
||||
"vercel-kv": "unstorage/drivers/vercel-kv",
|
||||
"vercelKV": "unstorage/drivers/vercel-kv"
|
||||
};
|
||||
|
||||
export { builtinDrivers, createStorage, defineDriver, filterKeyByBase, filterKeyByDepth, joinKeys, normalizeBaseKey, normalizeKey, restoreSnapshot, snapshot };
|
113
node_modules/unstorage/dist/server.cjs
generated
vendored
Normal file
113
node_modules/unstorage/dist/server.cjs
generated
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
'use strict';
|
||||
|
||||
const h3 = require('h3');
|
||||
const utils = require('./shared/unstorage.DgtRghtF.cjs');
|
||||
|
||||
const MethodToTypeMap = {
|
||||
GET: "read",
|
||||
HEAD: "read",
|
||||
PUT: "write",
|
||||
DELETE: "write"
|
||||
};
|
||||
function createH3StorageHandler(storage, opts = {}) {
|
||||
return h3.eventHandler(async (event) => {
|
||||
const _path = opts.resolvePath?.(event) ?? event.path;
|
||||
const lastChar = _path[_path.length - 1];
|
||||
const isBaseKey = lastChar === ":" || lastChar === "/";
|
||||
const key = isBaseKey ? utils.normalizeBaseKey(_path) : utils.normalizeKey(_path);
|
||||
if (!(event.method in MethodToTypeMap)) {
|
||||
throw h3.createError({
|
||||
statusCode: 405,
|
||||
statusMessage: `Method Not Allowed: ${event.method}`
|
||||
});
|
||||
}
|
||||
try {
|
||||
await opts.authorize?.({
|
||||
type: MethodToTypeMap[event.method],
|
||||
event,
|
||||
key
|
||||
});
|
||||
} catch (error) {
|
||||
const _httpError = h3.isError(error) ? error : h3.createError({
|
||||
statusMessage: error?.message,
|
||||
statusCode: 401,
|
||||
...error
|
||||
});
|
||||
throw _httpError;
|
||||
}
|
||||
if (event.method === "GET") {
|
||||
if (isBaseKey) {
|
||||
const keys = await storage.getKeys(key);
|
||||
return keys.map((key2) => key2.replace(/:/g, "/"));
|
||||
}
|
||||
const isRaw = h3.getRequestHeader(event, "accept") === "application/octet-stream";
|
||||
const driverValue = await (isRaw ? storage.getItemRaw(key) : storage.getItem(key));
|
||||
if (driverValue === null) {
|
||||
throw h3.createError({
|
||||
statusCode: 404,
|
||||
statusMessage: "KV value not found"
|
||||
});
|
||||
}
|
||||
setMetaHeaders(event, await storage.getMeta(key));
|
||||
return isRaw ? driverValue : utils.stringify(driverValue);
|
||||
}
|
||||
if (event.method === "HEAD") {
|
||||
if (!await storage.hasItem(key)) {
|
||||
throw h3.createError({
|
||||
statusCode: 404,
|
||||
statusMessage: "KV value not found"
|
||||
});
|
||||
}
|
||||
setMetaHeaders(event, await storage.getMeta(key));
|
||||
return "";
|
||||
}
|
||||
if (event.method === "PUT") {
|
||||
const isRaw = h3.getRequestHeader(event, "content-type") === "application/octet-stream";
|
||||
const topts = {
|
||||
ttl: Number(h3.getRequestHeader(event, "x-ttl")) || void 0
|
||||
};
|
||||
if (isRaw) {
|
||||
const value = await h3.readRawBody(event, false);
|
||||
await storage.setItemRaw(key, value, topts);
|
||||
} else {
|
||||
const value = await h3.readRawBody(event, "utf8");
|
||||
if (value !== void 0) {
|
||||
await storage.setItem(key, value, topts);
|
||||
}
|
||||
}
|
||||
return "OK";
|
||||
}
|
||||
if (event.method === "DELETE") {
|
||||
await (isBaseKey ? storage.clear(key) : storage.removeItem(key));
|
||||
return "OK";
|
||||
}
|
||||
throw h3.createError({
|
||||
statusCode: 405,
|
||||
statusMessage: `Method Not Allowed: ${event.method}`
|
||||
});
|
||||
});
|
||||
}
|
||||
function setMetaHeaders(event, meta) {
|
||||
if (meta.mtime) {
|
||||
h3.setResponseHeader(
|
||||
event,
|
||||
"last-modified",
|
||||
new Date(meta.mtime).toUTCString()
|
||||
);
|
||||
}
|
||||
if (meta.ttl) {
|
||||
h3.setResponseHeader(event, "x-ttl", `${meta.ttl}`);
|
||||
h3.setResponseHeader(event, "cache-control", `max-age=${meta.ttl}`);
|
||||
}
|
||||
}
|
||||
function createStorageServer(storage, options = {}) {
|
||||
const app = h3.createApp({ debug: true });
|
||||
const handler = createH3StorageHandler(storage, options);
|
||||
app.use(handler);
|
||||
return {
|
||||
handle: h3.toNodeListener(app)
|
||||
};
|
||||
}
|
||||
|
||||
exports.createH3StorageHandler = createH3StorageHandler;
|
||||
exports.createStorageServer = createStorageServer;
|
43
node_modules/unstorage/dist/server.d.cts
generated
vendored
Normal file
43
node_modules/unstorage/dist/server.d.cts
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import { RequestListener } from 'node:http';
|
||||
import { H3Event, EventHandler } from 'h3';
|
||||
import { a as Storage } from './shared/unstorage.Ca7R4QL2.cjs';
|
||||
|
||||
type StorageServerRequest = {
|
||||
event: H3Event;
|
||||
key: string;
|
||||
type: "read" | "write";
|
||||
};
|
||||
interface StorageServerOptions {
|
||||
authorize?: (request: StorageServerRequest) => void | Promise<void>;
|
||||
resolvePath?: (event: H3Event) => string;
|
||||
}
|
||||
/**
|
||||
* This function creates an h3-based handler for the storage server. It can then be used as event handler in h3 or Nitro
|
||||
* @param storage The storage which should be used for the storage server
|
||||
* @param opts Storage options to set the authorization check or a custom path resolver
|
||||
* @returns
|
||||
* @see createStorageServer if a node-compatible handler is needed
|
||||
*/
|
||||
declare function createH3StorageHandler(storage: Storage, opts?: StorageServerOptions): EventHandler;
|
||||
/**
|
||||
* This function creates a node-compatible handler for your custom storage server.
|
||||
*
|
||||
* The storage server will handle HEAD, GET, PUT and DELETE requests.
|
||||
* HEAD: Return if the request item exists in the storage, including a last-modified header if the storage supports it and the meta is stored
|
||||
* GET: Return the item if it exists
|
||||
* PUT: Sets the item
|
||||
* DELETE: Removes the item (or clears the whole storage if the base key was used)
|
||||
*
|
||||
* If the request sets the `Accept` header to `application/octet-stream`, the server will handle the item as raw data.
|
||||
*
|
||||
* @param storage The storage which should be used for the storage server
|
||||
* @param options Defining functions such as an authorization check and a custom path resolver
|
||||
* @returns An object containing then `handle` function for the handler
|
||||
* @see createH3StorageHandler For the bare h3 version which can be used with h3 or Nitro
|
||||
*/
|
||||
declare function createStorageServer(storage: Storage, options?: StorageServerOptions): {
|
||||
handle: RequestListener;
|
||||
};
|
||||
|
||||
export { createH3StorageHandler, createStorageServer };
|
||||
export type { StorageServerOptions, StorageServerRequest };
|
43
node_modules/unstorage/dist/server.d.mts
generated
vendored
Normal file
43
node_modules/unstorage/dist/server.d.mts
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import { RequestListener } from 'node:http';
|
||||
import { H3Event, EventHandler } from 'h3';
|
||||
import { a as Storage } from './shared/unstorage.Ca7R4QL2.mjs';
|
||||
|
||||
type StorageServerRequest = {
|
||||
event: H3Event;
|
||||
key: string;
|
||||
type: "read" | "write";
|
||||
};
|
||||
interface StorageServerOptions {
|
||||
authorize?: (request: StorageServerRequest) => void | Promise<void>;
|
||||
resolvePath?: (event: H3Event) => string;
|
||||
}
|
||||
/**
|
||||
* This function creates an h3-based handler for the storage server. It can then be used as event handler in h3 or Nitro
|
||||
* @param storage The storage which should be used for the storage server
|
||||
* @param opts Storage options to set the authorization check or a custom path resolver
|
||||
* @returns
|
||||
* @see createStorageServer if a node-compatible handler is needed
|
||||
*/
|
||||
declare function createH3StorageHandler(storage: Storage, opts?: StorageServerOptions): EventHandler;
|
||||
/**
|
||||
* This function creates a node-compatible handler for your custom storage server.
|
||||
*
|
||||
* The storage server will handle HEAD, GET, PUT and DELETE requests.
|
||||
* HEAD: Return if the request item exists in the storage, including a last-modified header if the storage supports it and the meta is stored
|
||||
* GET: Return the item if it exists
|
||||
* PUT: Sets the item
|
||||
* DELETE: Removes the item (or clears the whole storage if the base key was used)
|
||||
*
|
||||
* If the request sets the `Accept` header to `application/octet-stream`, the server will handle the item as raw data.
|
||||
*
|
||||
* @param storage The storage which should be used for the storage server
|
||||
* @param options Defining functions such as an authorization check and a custom path resolver
|
||||
* @returns An object containing then `handle` function for the handler
|
||||
* @see createH3StorageHandler For the bare h3 version which can be used with h3 or Nitro
|
||||
*/
|
||||
declare function createStorageServer(storage: Storage, options?: StorageServerOptions): {
|
||||
handle: RequestListener;
|
||||
};
|
||||
|
||||
export { createH3StorageHandler, createStorageServer };
|
||||
export type { StorageServerOptions, StorageServerRequest };
|
43
node_modules/unstorage/dist/server.d.ts
generated
vendored
Normal file
43
node_modules/unstorage/dist/server.d.ts
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import { RequestListener } from 'node:http';
|
||||
import { H3Event, EventHandler } from 'h3';
|
||||
import { a as Storage } from './shared/unstorage.Ca7R4QL2.js';
|
||||
|
||||
type StorageServerRequest = {
|
||||
event: H3Event;
|
||||
key: string;
|
||||
type: "read" | "write";
|
||||
};
|
||||
interface StorageServerOptions {
|
||||
authorize?: (request: StorageServerRequest) => void | Promise<void>;
|
||||
resolvePath?: (event: H3Event) => string;
|
||||
}
|
||||
/**
|
||||
* This function creates an h3-based handler for the storage server. It can then be used as event handler in h3 or Nitro
|
||||
* @param storage The storage which should be used for the storage server
|
||||
* @param opts Storage options to set the authorization check or a custom path resolver
|
||||
* @returns
|
||||
* @see createStorageServer if a node-compatible handler is needed
|
||||
*/
|
||||
declare function createH3StorageHandler(storage: Storage, opts?: StorageServerOptions): EventHandler;
|
||||
/**
|
||||
* This function creates a node-compatible handler for your custom storage server.
|
||||
*
|
||||
* The storage server will handle HEAD, GET, PUT and DELETE requests.
|
||||
* HEAD: Return if the request item exists in the storage, including a last-modified header if the storage supports it and the meta is stored
|
||||
* GET: Return the item if it exists
|
||||
* PUT: Sets the item
|
||||
* DELETE: Removes the item (or clears the whole storage if the base key was used)
|
||||
*
|
||||
* If the request sets the `Accept` header to `application/octet-stream`, the server will handle the item as raw data.
|
||||
*
|
||||
* @param storage The storage which should be used for the storage server
|
||||
* @param options Defining functions such as an authorization check and a custom path resolver
|
||||
* @returns An object containing then `handle` function for the handler
|
||||
* @see createH3StorageHandler For the bare h3 version which can be used with h3 or Nitro
|
||||
*/
|
||||
declare function createStorageServer(storage: Storage, options?: StorageServerOptions): {
|
||||
handle: RequestListener;
|
||||
};
|
||||
|
||||
export { createH3StorageHandler, createStorageServer };
|
||||
export type { StorageServerOptions, StorageServerRequest };
|
110
node_modules/unstorage/dist/server.mjs
generated
vendored
Normal file
110
node_modules/unstorage/dist/server.mjs
generated
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
import { eventHandler, createError, isError, getRequestHeader, readRawBody, createApp, toNodeListener, setResponseHeader } from 'h3';
|
||||
import { n as normalizeBaseKey, a as normalizeKey, d as stringify } from './shared/unstorage.CoCt7NXC.mjs';
|
||||
|
||||
const MethodToTypeMap = {
|
||||
GET: "read",
|
||||
HEAD: "read",
|
||||
PUT: "write",
|
||||
DELETE: "write"
|
||||
};
|
||||
function createH3StorageHandler(storage, opts = {}) {
|
||||
return eventHandler(async (event) => {
|
||||
const _path = opts.resolvePath?.(event) ?? event.path;
|
||||
const lastChar = _path[_path.length - 1];
|
||||
const isBaseKey = lastChar === ":" || lastChar === "/";
|
||||
const key = isBaseKey ? normalizeBaseKey(_path) : normalizeKey(_path);
|
||||
if (!(event.method in MethodToTypeMap)) {
|
||||
throw createError({
|
||||
statusCode: 405,
|
||||
statusMessage: `Method Not Allowed: ${event.method}`
|
||||
});
|
||||
}
|
||||
try {
|
||||
await opts.authorize?.({
|
||||
type: MethodToTypeMap[event.method],
|
||||
event,
|
||||
key
|
||||
});
|
||||
} catch (error) {
|
||||
const _httpError = isError(error) ? error : createError({
|
||||
statusMessage: error?.message,
|
||||
statusCode: 401,
|
||||
...error
|
||||
});
|
||||
throw _httpError;
|
||||
}
|
||||
if (event.method === "GET") {
|
||||
if (isBaseKey) {
|
||||
const keys = await storage.getKeys(key);
|
||||
return keys.map((key2) => key2.replace(/:/g, "/"));
|
||||
}
|
||||
const isRaw = getRequestHeader(event, "accept") === "application/octet-stream";
|
||||
const driverValue = await (isRaw ? storage.getItemRaw(key) : storage.getItem(key));
|
||||
if (driverValue === null) {
|
||||
throw createError({
|
||||
statusCode: 404,
|
||||
statusMessage: "KV value not found"
|
||||
});
|
||||
}
|
||||
setMetaHeaders(event, await storage.getMeta(key));
|
||||
return isRaw ? driverValue : stringify(driverValue);
|
||||
}
|
||||
if (event.method === "HEAD") {
|
||||
if (!await storage.hasItem(key)) {
|
||||
throw createError({
|
||||
statusCode: 404,
|
||||
statusMessage: "KV value not found"
|
||||
});
|
||||
}
|
||||
setMetaHeaders(event, await storage.getMeta(key));
|
||||
return "";
|
||||
}
|
||||
if (event.method === "PUT") {
|
||||
const isRaw = getRequestHeader(event, "content-type") === "application/octet-stream";
|
||||
const topts = {
|
||||
ttl: Number(getRequestHeader(event, "x-ttl")) || void 0
|
||||
};
|
||||
if (isRaw) {
|
||||
const value = await readRawBody(event, false);
|
||||
await storage.setItemRaw(key, value, topts);
|
||||
} else {
|
||||
const value = await readRawBody(event, "utf8");
|
||||
if (value !== void 0) {
|
||||
await storage.setItem(key, value, topts);
|
||||
}
|
||||
}
|
||||
return "OK";
|
||||
}
|
||||
if (event.method === "DELETE") {
|
||||
await (isBaseKey ? storage.clear(key) : storage.removeItem(key));
|
||||
return "OK";
|
||||
}
|
||||
throw createError({
|
||||
statusCode: 405,
|
||||
statusMessage: `Method Not Allowed: ${event.method}`
|
||||
});
|
||||
});
|
||||
}
|
||||
function setMetaHeaders(event, meta) {
|
||||
if (meta.mtime) {
|
||||
setResponseHeader(
|
||||
event,
|
||||
"last-modified",
|
||||
new Date(meta.mtime).toUTCString()
|
||||
);
|
||||
}
|
||||
if (meta.ttl) {
|
||||
setResponseHeader(event, "x-ttl", `${meta.ttl}`);
|
||||
setResponseHeader(event, "cache-control", `max-age=${meta.ttl}`);
|
||||
}
|
||||
}
|
||||
function createStorageServer(storage, options = {}) {
|
||||
const app = createApp({ debug: true });
|
||||
const handler = createH3StorageHandler(storage, options);
|
||||
app.use(handler);
|
||||
return {
|
||||
handle: toNodeListener(app)
|
||||
};
|
||||
}
|
||||
|
||||
export { createH3StorageHandler, createStorageServer };
|
121
node_modules/unstorage/dist/shared/unstorage.Ca7R4QL2.d.cts
generated
vendored
Normal file
121
node_modules/unstorage/dist/shared/unstorage.Ca7R4QL2.d.cts
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
type StorageValue = null | string | number | boolean | object;
|
||||
type WatchEvent = "update" | "remove";
|
||||
type WatchCallback = (event: WatchEvent, key: string) => any;
|
||||
type MaybePromise<T> = T | Promise<T>;
|
||||
type MaybeDefined<T> = T extends any ? T : any;
|
||||
type Unwatch = () => MaybePromise<void>;
|
||||
interface StorageMeta {
|
||||
atime?: Date;
|
||||
mtime?: Date;
|
||||
ttl?: number;
|
||||
[key: string]: StorageValue | Date | undefined;
|
||||
}
|
||||
type TransactionOptions = Record<string, any>;
|
||||
type GetKeysOptions = TransactionOptions & {
|
||||
maxDepth?: number;
|
||||
};
|
||||
interface DriverFlags {
|
||||
maxDepth?: boolean;
|
||||
ttl?: boolean;
|
||||
}
|
||||
interface Driver<OptionsT = any, InstanceT = any> {
|
||||
name?: string;
|
||||
flags?: DriverFlags;
|
||||
options?: OptionsT;
|
||||
getInstance?: () => InstanceT;
|
||||
hasItem: (key: string, opts: TransactionOptions) => MaybePromise<boolean>;
|
||||
getItem: (key: string, opts?: TransactionOptions) => MaybePromise<StorageValue>;
|
||||
/** @experimental */
|
||||
getItems?: (items: {
|
||||
key: string;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => MaybePromise<{
|
||||
key: string;
|
||||
value: StorageValue;
|
||||
}[]>;
|
||||
/** @experimental */
|
||||
getItemRaw?: (key: string, opts: TransactionOptions) => MaybePromise<unknown>;
|
||||
setItem?: (key: string, value: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
/** @experimental */
|
||||
setItems?: (items: {
|
||||
key: string;
|
||||
value: string;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => MaybePromise<void>;
|
||||
/** @experimental */
|
||||
setItemRaw?: (key: string, value: any, opts: TransactionOptions) => MaybePromise<void>;
|
||||
removeItem?: (key: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
getMeta?: (key: string, opts: TransactionOptions) => MaybePromise<StorageMeta | null>;
|
||||
getKeys: (base: string, opts: GetKeysOptions) => MaybePromise<string[]>;
|
||||
clear?: (base: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
dispose?: () => MaybePromise<void>;
|
||||
watch?: (callback: WatchCallback) => MaybePromise<Unwatch>;
|
||||
}
|
||||
type StorageDefinition = {
|
||||
items: unknown;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
type StorageItemMap<T> = T extends StorageDefinition ? T["items"] : T;
|
||||
type StorageItemType<T, K> = K extends keyof StorageItemMap<T> ? StorageItemMap<T>[K] : T extends StorageDefinition ? StorageValue : T;
|
||||
interface Storage<T extends StorageValue = StorageValue> {
|
||||
hasItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, opts?: TransactionOptions): Promise<boolean>;
|
||||
hasItem(key: string, opts?: TransactionOptions): Promise<boolean>;
|
||||
getItem<U extends Extract<T, StorageDefinition>, K extends string & keyof StorageItemMap<U>>(key: K, ops?: TransactionOptions): Promise<StorageItemType<T, K> | null>;
|
||||
getItem<R = StorageItemType<T, string>>(key: string, opts?: TransactionOptions): Promise<R | null>;
|
||||
/** @experimental */
|
||||
getItems: <U extends T>(items: (string | {
|
||||
key: string;
|
||||
options?: TransactionOptions;
|
||||
})[], commonOptions?: TransactionOptions) => Promise<{
|
||||
key: string;
|
||||
value: U;
|
||||
}[]>;
|
||||
/** @experimental See https://github.com/unjs/unstorage/issues/142 */
|
||||
getItemRaw: <T = any>(key: string, opts?: TransactionOptions) => Promise<MaybeDefined<T> | null>;
|
||||
setItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, value: StorageItemType<T, K>, opts?: TransactionOptions): Promise<void>;
|
||||
setItem<U extends T>(key: string, value: U, opts?: TransactionOptions): Promise<void>;
|
||||
/** @experimental */
|
||||
setItems: <U extends T>(items: {
|
||||
key: string;
|
||||
value: U;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => Promise<void>;
|
||||
/** @experimental See https://github.com/unjs/unstorage/issues/142 */
|
||||
setItemRaw: <T = any>(key: string, value: MaybeDefined<T>, opts?: TransactionOptions) => Promise<void>;
|
||||
removeItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, opts?: (TransactionOptions & {
|
||||
removeMeta?: boolean;
|
||||
}) | boolean): Promise<void>;
|
||||
removeItem(key: string, opts?: (TransactionOptions & {
|
||||
removeMeta?: boolean;
|
||||
}) | boolean): Promise<void>;
|
||||
getMeta: (key: string, opts?: (TransactionOptions & {
|
||||
nativeOnly?: boolean;
|
||||
}) | boolean) => MaybePromise<StorageMeta>;
|
||||
setMeta: (key: string, value: StorageMeta, opts?: TransactionOptions) => Promise<void>;
|
||||
removeMeta: (key: string, opts?: TransactionOptions) => Promise<void>;
|
||||
getKeys: (base?: string, opts?: GetKeysOptions) => Promise<string[]>;
|
||||
clear: (base?: string, opts?: TransactionOptions) => Promise<void>;
|
||||
dispose: () => Promise<void>;
|
||||
watch: (callback: WatchCallback) => Promise<Unwatch>;
|
||||
unwatch: () => Promise<void>;
|
||||
mount: (base: string, driver: Driver) => Storage;
|
||||
unmount: (base: string, dispose?: boolean) => Promise<void>;
|
||||
getMount: (key?: string) => {
|
||||
base: string;
|
||||
driver: Driver;
|
||||
};
|
||||
getMounts: (base?: string, options?: {
|
||||
parents?: boolean;
|
||||
}) => {
|
||||
base: string;
|
||||
driver: Driver;
|
||||
}[];
|
||||
keys: Storage["getKeys"];
|
||||
get: Storage<T>["getItem"];
|
||||
set: Storage<T>["setItem"];
|
||||
has: Storage<T>["hasItem"];
|
||||
del: Storage<T>["removeItem"];
|
||||
remove: Storage<T>["removeItem"];
|
||||
}
|
||||
|
||||
export type { Driver as D, GetKeysOptions as G, StorageValue as S, TransactionOptions as T, Unwatch as U, WatchEvent as W, Storage as a, WatchCallback as b, StorageMeta as c, DriverFlags as d };
|
121
node_modules/unstorage/dist/shared/unstorage.Ca7R4QL2.d.mts
generated
vendored
Normal file
121
node_modules/unstorage/dist/shared/unstorage.Ca7R4QL2.d.mts
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
type StorageValue = null | string | number | boolean | object;
|
||||
type WatchEvent = "update" | "remove";
|
||||
type WatchCallback = (event: WatchEvent, key: string) => any;
|
||||
type MaybePromise<T> = T | Promise<T>;
|
||||
type MaybeDefined<T> = T extends any ? T : any;
|
||||
type Unwatch = () => MaybePromise<void>;
|
||||
interface StorageMeta {
|
||||
atime?: Date;
|
||||
mtime?: Date;
|
||||
ttl?: number;
|
||||
[key: string]: StorageValue | Date | undefined;
|
||||
}
|
||||
type TransactionOptions = Record<string, any>;
|
||||
type GetKeysOptions = TransactionOptions & {
|
||||
maxDepth?: number;
|
||||
};
|
||||
interface DriverFlags {
|
||||
maxDepth?: boolean;
|
||||
ttl?: boolean;
|
||||
}
|
||||
interface Driver<OptionsT = any, InstanceT = any> {
|
||||
name?: string;
|
||||
flags?: DriverFlags;
|
||||
options?: OptionsT;
|
||||
getInstance?: () => InstanceT;
|
||||
hasItem: (key: string, opts: TransactionOptions) => MaybePromise<boolean>;
|
||||
getItem: (key: string, opts?: TransactionOptions) => MaybePromise<StorageValue>;
|
||||
/** @experimental */
|
||||
getItems?: (items: {
|
||||
key: string;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => MaybePromise<{
|
||||
key: string;
|
||||
value: StorageValue;
|
||||
}[]>;
|
||||
/** @experimental */
|
||||
getItemRaw?: (key: string, opts: TransactionOptions) => MaybePromise<unknown>;
|
||||
setItem?: (key: string, value: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
/** @experimental */
|
||||
setItems?: (items: {
|
||||
key: string;
|
||||
value: string;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => MaybePromise<void>;
|
||||
/** @experimental */
|
||||
setItemRaw?: (key: string, value: any, opts: TransactionOptions) => MaybePromise<void>;
|
||||
removeItem?: (key: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
getMeta?: (key: string, opts: TransactionOptions) => MaybePromise<StorageMeta | null>;
|
||||
getKeys: (base: string, opts: GetKeysOptions) => MaybePromise<string[]>;
|
||||
clear?: (base: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
dispose?: () => MaybePromise<void>;
|
||||
watch?: (callback: WatchCallback) => MaybePromise<Unwatch>;
|
||||
}
|
||||
type StorageDefinition = {
|
||||
items: unknown;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
type StorageItemMap<T> = T extends StorageDefinition ? T["items"] : T;
|
||||
type StorageItemType<T, K> = K extends keyof StorageItemMap<T> ? StorageItemMap<T>[K] : T extends StorageDefinition ? StorageValue : T;
|
||||
interface Storage<T extends StorageValue = StorageValue> {
|
||||
hasItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, opts?: TransactionOptions): Promise<boolean>;
|
||||
hasItem(key: string, opts?: TransactionOptions): Promise<boolean>;
|
||||
getItem<U extends Extract<T, StorageDefinition>, K extends string & keyof StorageItemMap<U>>(key: K, ops?: TransactionOptions): Promise<StorageItemType<T, K> | null>;
|
||||
getItem<R = StorageItemType<T, string>>(key: string, opts?: TransactionOptions): Promise<R | null>;
|
||||
/** @experimental */
|
||||
getItems: <U extends T>(items: (string | {
|
||||
key: string;
|
||||
options?: TransactionOptions;
|
||||
})[], commonOptions?: TransactionOptions) => Promise<{
|
||||
key: string;
|
||||
value: U;
|
||||
}[]>;
|
||||
/** @experimental See https://github.com/unjs/unstorage/issues/142 */
|
||||
getItemRaw: <T = any>(key: string, opts?: TransactionOptions) => Promise<MaybeDefined<T> | null>;
|
||||
setItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, value: StorageItemType<T, K>, opts?: TransactionOptions): Promise<void>;
|
||||
setItem<U extends T>(key: string, value: U, opts?: TransactionOptions): Promise<void>;
|
||||
/** @experimental */
|
||||
setItems: <U extends T>(items: {
|
||||
key: string;
|
||||
value: U;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => Promise<void>;
|
||||
/** @experimental See https://github.com/unjs/unstorage/issues/142 */
|
||||
setItemRaw: <T = any>(key: string, value: MaybeDefined<T>, opts?: TransactionOptions) => Promise<void>;
|
||||
removeItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, opts?: (TransactionOptions & {
|
||||
removeMeta?: boolean;
|
||||
}) | boolean): Promise<void>;
|
||||
removeItem(key: string, opts?: (TransactionOptions & {
|
||||
removeMeta?: boolean;
|
||||
}) | boolean): Promise<void>;
|
||||
getMeta: (key: string, opts?: (TransactionOptions & {
|
||||
nativeOnly?: boolean;
|
||||
}) | boolean) => MaybePromise<StorageMeta>;
|
||||
setMeta: (key: string, value: StorageMeta, opts?: TransactionOptions) => Promise<void>;
|
||||
removeMeta: (key: string, opts?: TransactionOptions) => Promise<void>;
|
||||
getKeys: (base?: string, opts?: GetKeysOptions) => Promise<string[]>;
|
||||
clear: (base?: string, opts?: TransactionOptions) => Promise<void>;
|
||||
dispose: () => Promise<void>;
|
||||
watch: (callback: WatchCallback) => Promise<Unwatch>;
|
||||
unwatch: () => Promise<void>;
|
||||
mount: (base: string, driver: Driver) => Storage;
|
||||
unmount: (base: string, dispose?: boolean) => Promise<void>;
|
||||
getMount: (key?: string) => {
|
||||
base: string;
|
||||
driver: Driver;
|
||||
};
|
||||
getMounts: (base?: string, options?: {
|
||||
parents?: boolean;
|
||||
}) => {
|
||||
base: string;
|
||||
driver: Driver;
|
||||
}[];
|
||||
keys: Storage["getKeys"];
|
||||
get: Storage<T>["getItem"];
|
||||
set: Storage<T>["setItem"];
|
||||
has: Storage<T>["hasItem"];
|
||||
del: Storage<T>["removeItem"];
|
||||
remove: Storage<T>["removeItem"];
|
||||
}
|
||||
|
||||
export type { Driver as D, GetKeysOptions as G, StorageValue as S, TransactionOptions as T, Unwatch as U, WatchEvent as W, Storage as a, WatchCallback as b, StorageMeta as c, DriverFlags as d };
|
121
node_modules/unstorage/dist/shared/unstorage.Ca7R4QL2.d.ts
generated
vendored
Normal file
121
node_modules/unstorage/dist/shared/unstorage.Ca7R4QL2.d.ts
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
type StorageValue = null | string | number | boolean | object;
|
||||
type WatchEvent = "update" | "remove";
|
||||
type WatchCallback = (event: WatchEvent, key: string) => any;
|
||||
type MaybePromise<T> = T | Promise<T>;
|
||||
type MaybeDefined<T> = T extends any ? T : any;
|
||||
type Unwatch = () => MaybePromise<void>;
|
||||
interface StorageMeta {
|
||||
atime?: Date;
|
||||
mtime?: Date;
|
||||
ttl?: number;
|
||||
[key: string]: StorageValue | Date | undefined;
|
||||
}
|
||||
type TransactionOptions = Record<string, any>;
|
||||
type GetKeysOptions = TransactionOptions & {
|
||||
maxDepth?: number;
|
||||
};
|
||||
interface DriverFlags {
|
||||
maxDepth?: boolean;
|
||||
ttl?: boolean;
|
||||
}
|
||||
interface Driver<OptionsT = any, InstanceT = any> {
|
||||
name?: string;
|
||||
flags?: DriverFlags;
|
||||
options?: OptionsT;
|
||||
getInstance?: () => InstanceT;
|
||||
hasItem: (key: string, opts: TransactionOptions) => MaybePromise<boolean>;
|
||||
getItem: (key: string, opts?: TransactionOptions) => MaybePromise<StorageValue>;
|
||||
/** @experimental */
|
||||
getItems?: (items: {
|
||||
key: string;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => MaybePromise<{
|
||||
key: string;
|
||||
value: StorageValue;
|
||||
}[]>;
|
||||
/** @experimental */
|
||||
getItemRaw?: (key: string, opts: TransactionOptions) => MaybePromise<unknown>;
|
||||
setItem?: (key: string, value: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
/** @experimental */
|
||||
setItems?: (items: {
|
||||
key: string;
|
||||
value: string;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => MaybePromise<void>;
|
||||
/** @experimental */
|
||||
setItemRaw?: (key: string, value: any, opts: TransactionOptions) => MaybePromise<void>;
|
||||
removeItem?: (key: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
getMeta?: (key: string, opts: TransactionOptions) => MaybePromise<StorageMeta | null>;
|
||||
getKeys: (base: string, opts: GetKeysOptions) => MaybePromise<string[]>;
|
||||
clear?: (base: string, opts: TransactionOptions) => MaybePromise<void>;
|
||||
dispose?: () => MaybePromise<void>;
|
||||
watch?: (callback: WatchCallback) => MaybePromise<Unwatch>;
|
||||
}
|
||||
type StorageDefinition = {
|
||||
items: unknown;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
type StorageItemMap<T> = T extends StorageDefinition ? T["items"] : T;
|
||||
type StorageItemType<T, K> = K extends keyof StorageItemMap<T> ? StorageItemMap<T>[K] : T extends StorageDefinition ? StorageValue : T;
|
||||
interface Storage<T extends StorageValue = StorageValue> {
|
||||
hasItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, opts?: TransactionOptions): Promise<boolean>;
|
||||
hasItem(key: string, opts?: TransactionOptions): Promise<boolean>;
|
||||
getItem<U extends Extract<T, StorageDefinition>, K extends string & keyof StorageItemMap<U>>(key: K, ops?: TransactionOptions): Promise<StorageItemType<T, K> | null>;
|
||||
getItem<R = StorageItemType<T, string>>(key: string, opts?: TransactionOptions): Promise<R | null>;
|
||||
/** @experimental */
|
||||
getItems: <U extends T>(items: (string | {
|
||||
key: string;
|
||||
options?: TransactionOptions;
|
||||
})[], commonOptions?: TransactionOptions) => Promise<{
|
||||
key: string;
|
||||
value: U;
|
||||
}[]>;
|
||||
/** @experimental See https://github.com/unjs/unstorage/issues/142 */
|
||||
getItemRaw: <T = any>(key: string, opts?: TransactionOptions) => Promise<MaybeDefined<T> | null>;
|
||||
setItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, value: StorageItemType<T, K>, opts?: TransactionOptions): Promise<void>;
|
||||
setItem<U extends T>(key: string, value: U, opts?: TransactionOptions): Promise<void>;
|
||||
/** @experimental */
|
||||
setItems: <U extends T>(items: {
|
||||
key: string;
|
||||
value: U;
|
||||
options?: TransactionOptions;
|
||||
}[], commonOptions?: TransactionOptions) => Promise<void>;
|
||||
/** @experimental See https://github.com/unjs/unstorage/issues/142 */
|
||||
setItemRaw: <T = any>(key: string, value: MaybeDefined<T>, opts?: TransactionOptions) => Promise<void>;
|
||||
removeItem<U extends Extract<T, StorageDefinition>, K extends keyof StorageItemMap<U>>(key: K, opts?: (TransactionOptions & {
|
||||
removeMeta?: boolean;
|
||||
}) | boolean): Promise<void>;
|
||||
removeItem(key: string, opts?: (TransactionOptions & {
|
||||
removeMeta?: boolean;
|
||||
}) | boolean): Promise<void>;
|
||||
getMeta: (key: string, opts?: (TransactionOptions & {
|
||||
nativeOnly?: boolean;
|
||||
}) | boolean) => MaybePromise<StorageMeta>;
|
||||
setMeta: (key: string, value: StorageMeta, opts?: TransactionOptions) => Promise<void>;
|
||||
removeMeta: (key: string, opts?: TransactionOptions) => Promise<void>;
|
||||
getKeys: (base?: string, opts?: GetKeysOptions) => Promise<string[]>;
|
||||
clear: (base?: string, opts?: TransactionOptions) => Promise<void>;
|
||||
dispose: () => Promise<void>;
|
||||
watch: (callback: WatchCallback) => Promise<Unwatch>;
|
||||
unwatch: () => Promise<void>;
|
||||
mount: (base: string, driver: Driver) => Storage;
|
||||
unmount: (base: string, dispose?: boolean) => Promise<void>;
|
||||
getMount: (key?: string) => {
|
||||
base: string;
|
||||
driver: Driver;
|
||||
};
|
||||
getMounts: (base?: string, options?: {
|
||||
parents?: boolean;
|
||||
}) => {
|
||||
base: string;
|
||||
driver: Driver;
|
||||
}[];
|
||||
keys: Storage["getKeys"];
|
||||
get: Storage<T>["getItem"];
|
||||
set: Storage<T>["setItem"];
|
||||
has: Storage<T>["hasItem"];
|
||||
del: Storage<T>["removeItem"];
|
||||
remove: Storage<T>["removeItem"];
|
||||
}
|
||||
|
||||
export type { Driver as D, GetKeysOptions as G, StorageValue as S, TransactionOptions as T, Unwatch as U, WatchEvent as W, Storage as a, WatchCallback as b, StorageMeta as c, DriverFlags as d };
|
151
node_modules/unstorage/dist/shared/unstorage.CoCt7NXC.mjs
generated
vendored
Normal file
151
node_modules/unstorage/dist/shared/unstorage.CoCt7NXC.mjs
generated
vendored
Normal file
@@ -0,0 +1,151 @@
|
||||
function wrapToPromise(value) {
|
||||
if (!value || typeof value.then !== "function") {
|
||||
return Promise.resolve(value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
function asyncCall(function_, ...arguments_) {
|
||||
try {
|
||||
return wrapToPromise(function_(...arguments_));
|
||||
} catch (error) {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
function isPrimitive(value) {
|
||||
const type = typeof value;
|
||||
return value === null || type !== "object" && type !== "function";
|
||||
}
|
||||
function isPureObject(value) {
|
||||
const proto = Object.getPrototypeOf(value);
|
||||
return !proto || proto.isPrototypeOf(Object);
|
||||
}
|
||||
function stringify(value) {
|
||||
if (isPrimitive(value)) {
|
||||
return String(value);
|
||||
}
|
||||
if (isPureObject(value) || Array.isArray(value)) {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
if (typeof value.toJSON === "function") {
|
||||
return stringify(value.toJSON());
|
||||
}
|
||||
throw new Error("[unstorage] Cannot stringify value!");
|
||||
}
|
||||
const BASE64_PREFIX = "base64:";
|
||||
function serializeRaw(value) {
|
||||
if (typeof value === "string") {
|
||||
return value;
|
||||
}
|
||||
return BASE64_PREFIX + base64Encode(value);
|
||||
}
|
||||
function deserializeRaw(value) {
|
||||
if (typeof value !== "string") {
|
||||
return value;
|
||||
}
|
||||
if (!value.startsWith(BASE64_PREFIX)) {
|
||||
return value;
|
||||
}
|
||||
return base64Decode(value.slice(BASE64_PREFIX.length));
|
||||
}
|
||||
function base64Decode(input) {
|
||||
if (globalThis.Buffer) {
|
||||
return Buffer.from(input, "base64");
|
||||
}
|
||||
return Uint8Array.from(
|
||||
globalThis.atob(input),
|
||||
(c) => c.codePointAt(0)
|
||||
);
|
||||
}
|
||||
function base64Encode(input) {
|
||||
if (globalThis.Buffer) {
|
||||
return Buffer.from(input).toString("base64");
|
||||
}
|
||||
return globalThis.btoa(String.fromCodePoint(...input));
|
||||
}
|
||||
|
||||
const storageKeyProperties = [
|
||||
"has",
|
||||
"hasItem",
|
||||
"get",
|
||||
"getItem",
|
||||
"getItemRaw",
|
||||
"set",
|
||||
"setItem",
|
||||
"setItemRaw",
|
||||
"del",
|
||||
"remove",
|
||||
"removeItem",
|
||||
"getMeta",
|
||||
"setMeta",
|
||||
"removeMeta",
|
||||
"getKeys",
|
||||
"clear",
|
||||
"mount",
|
||||
"unmount"
|
||||
];
|
||||
function prefixStorage(storage, base) {
|
||||
base = normalizeBaseKey(base);
|
||||
if (!base) {
|
||||
return storage;
|
||||
}
|
||||
const nsStorage = { ...storage };
|
||||
for (const property of storageKeyProperties) {
|
||||
nsStorage[property] = (key = "", ...args) => (
|
||||
// @ts-ignore
|
||||
storage[property](base + key, ...args)
|
||||
);
|
||||
}
|
||||
nsStorage.getKeys = (key = "", ...arguments_) => storage.getKeys(base + key, ...arguments_).then((keys) => keys.map((key2) => key2.slice(base.length)));
|
||||
nsStorage.getItems = async (items, commonOptions) => {
|
||||
const prefixedItems = items.map(
|
||||
(item) => typeof item === "string" ? base + item : { ...item, key: base + item.key }
|
||||
);
|
||||
const results = await storage.getItems(prefixedItems, commonOptions);
|
||||
return results.map((entry) => ({
|
||||
key: entry.key.slice(base.length),
|
||||
value: entry.value
|
||||
}));
|
||||
};
|
||||
nsStorage.setItems = async (items, commonOptions) => {
|
||||
const prefixedItems = items.map((item) => ({
|
||||
key: base + item.key,
|
||||
value: item.value,
|
||||
options: item.options
|
||||
}));
|
||||
return storage.setItems(prefixedItems, commonOptions);
|
||||
};
|
||||
return nsStorage;
|
||||
}
|
||||
function normalizeKey(key) {
|
||||
if (!key) {
|
||||
return "";
|
||||
}
|
||||
return key.split("?")[0]?.replace(/[/\\]/g, ":").replace(/:+/g, ":").replace(/^:|:$/g, "") || "";
|
||||
}
|
||||
function joinKeys(...keys) {
|
||||
return normalizeKey(keys.join(":"));
|
||||
}
|
||||
function normalizeBaseKey(base) {
|
||||
base = normalizeKey(base);
|
||||
return base ? base + ":" : "";
|
||||
}
|
||||
function filterKeyByDepth(key, depth) {
|
||||
if (depth === void 0) {
|
||||
return true;
|
||||
}
|
||||
let substrCount = 0;
|
||||
let index = key.indexOf(":");
|
||||
while (index > -1) {
|
||||
substrCount++;
|
||||
index = key.indexOf(":", index + 1);
|
||||
}
|
||||
return substrCount <= depth;
|
||||
}
|
||||
function filterKeyByBase(key, base) {
|
||||
if (base) {
|
||||
return key.startsWith(base) && key[key.length - 1] !== "$";
|
||||
}
|
||||
return key[key.length - 1] !== "$";
|
||||
}
|
||||
|
||||
export { normalizeKey as a, asyncCall as b, filterKeyByBase as c, stringify as d, deserializeRaw as e, filterKeyByDepth as f, joinKeys as j, normalizeBaseKey as n, prefixStorage as p, serializeRaw as s };
|
162
node_modules/unstorage/dist/shared/unstorage.DgtRghtF.cjs
generated
vendored
Normal file
162
node_modules/unstorage/dist/shared/unstorage.DgtRghtF.cjs
generated
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
'use strict';
|
||||
|
||||
function wrapToPromise(value) {
|
||||
if (!value || typeof value.then !== "function") {
|
||||
return Promise.resolve(value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
function asyncCall(function_, ...arguments_) {
|
||||
try {
|
||||
return wrapToPromise(function_(...arguments_));
|
||||
} catch (error) {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
function isPrimitive(value) {
|
||||
const type = typeof value;
|
||||
return value === null || type !== "object" && type !== "function";
|
||||
}
|
||||
function isPureObject(value) {
|
||||
const proto = Object.getPrototypeOf(value);
|
||||
return !proto || proto.isPrototypeOf(Object);
|
||||
}
|
||||
function stringify(value) {
|
||||
if (isPrimitive(value)) {
|
||||
return String(value);
|
||||
}
|
||||
if (isPureObject(value) || Array.isArray(value)) {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
if (typeof value.toJSON === "function") {
|
||||
return stringify(value.toJSON());
|
||||
}
|
||||
throw new Error("[unstorage] Cannot stringify value!");
|
||||
}
|
||||
const BASE64_PREFIX = "base64:";
|
||||
function serializeRaw(value) {
|
||||
if (typeof value === "string") {
|
||||
return value;
|
||||
}
|
||||
return BASE64_PREFIX + base64Encode(value);
|
||||
}
|
||||
function deserializeRaw(value) {
|
||||
if (typeof value !== "string") {
|
||||
return value;
|
||||
}
|
||||
if (!value.startsWith(BASE64_PREFIX)) {
|
||||
return value;
|
||||
}
|
||||
return base64Decode(value.slice(BASE64_PREFIX.length));
|
||||
}
|
||||
function base64Decode(input) {
|
||||
if (globalThis.Buffer) {
|
||||
return Buffer.from(input, "base64");
|
||||
}
|
||||
return Uint8Array.from(
|
||||
globalThis.atob(input),
|
||||
(c) => c.codePointAt(0)
|
||||
);
|
||||
}
|
||||
function base64Encode(input) {
|
||||
if (globalThis.Buffer) {
|
||||
return Buffer.from(input).toString("base64");
|
||||
}
|
||||
return globalThis.btoa(String.fromCodePoint(...input));
|
||||
}
|
||||
|
||||
const storageKeyProperties = [
|
||||
"has",
|
||||
"hasItem",
|
||||
"get",
|
||||
"getItem",
|
||||
"getItemRaw",
|
||||
"set",
|
||||
"setItem",
|
||||
"setItemRaw",
|
||||
"del",
|
||||
"remove",
|
||||
"removeItem",
|
||||
"getMeta",
|
||||
"setMeta",
|
||||
"removeMeta",
|
||||
"getKeys",
|
||||
"clear",
|
||||
"mount",
|
||||
"unmount"
|
||||
];
|
||||
function prefixStorage(storage, base) {
|
||||
base = normalizeBaseKey(base);
|
||||
if (!base) {
|
||||
return storage;
|
||||
}
|
||||
const nsStorage = { ...storage };
|
||||
for (const property of storageKeyProperties) {
|
||||
nsStorage[property] = (key = "", ...args) => (
|
||||
// @ts-ignore
|
||||
storage[property](base + key, ...args)
|
||||
);
|
||||
}
|
||||
nsStorage.getKeys = (key = "", ...arguments_) => storage.getKeys(base + key, ...arguments_).then((keys) => keys.map((key2) => key2.slice(base.length)));
|
||||
nsStorage.getItems = async (items, commonOptions) => {
|
||||
const prefixedItems = items.map(
|
||||
(item) => typeof item === "string" ? base + item : { ...item, key: base + item.key }
|
||||
);
|
||||
const results = await storage.getItems(prefixedItems, commonOptions);
|
||||
return results.map((entry) => ({
|
||||
key: entry.key.slice(base.length),
|
||||
value: entry.value
|
||||
}));
|
||||
};
|
||||
nsStorage.setItems = async (items, commonOptions) => {
|
||||
const prefixedItems = items.map((item) => ({
|
||||
key: base + item.key,
|
||||
value: item.value,
|
||||
options: item.options
|
||||
}));
|
||||
return storage.setItems(prefixedItems, commonOptions);
|
||||
};
|
||||
return nsStorage;
|
||||
}
|
||||
function normalizeKey(key) {
|
||||
if (!key) {
|
||||
return "";
|
||||
}
|
||||
return key.split("?")[0]?.replace(/[/\\]/g, ":").replace(/:+/g, ":").replace(/^:|:$/g, "") || "";
|
||||
}
|
||||
function joinKeys(...keys) {
|
||||
return normalizeKey(keys.join(":"));
|
||||
}
|
||||
function normalizeBaseKey(base) {
|
||||
base = normalizeKey(base);
|
||||
return base ? base + ":" : "";
|
||||
}
|
||||
function filterKeyByDepth(key, depth) {
|
||||
if (depth === void 0) {
|
||||
return true;
|
||||
}
|
||||
let substrCount = 0;
|
||||
let index = key.indexOf(":");
|
||||
while (index > -1) {
|
||||
substrCount++;
|
||||
index = key.indexOf(":", index + 1);
|
||||
}
|
||||
return substrCount <= depth;
|
||||
}
|
||||
function filterKeyByBase(key, base) {
|
||||
if (base) {
|
||||
return key.startsWith(base) && key[key.length - 1] !== "$";
|
||||
}
|
||||
return key[key.length - 1] !== "$";
|
||||
}
|
||||
|
||||
exports.asyncCall = asyncCall;
|
||||
exports.deserializeRaw = deserializeRaw;
|
||||
exports.filterKeyByBase = filterKeyByBase;
|
||||
exports.filterKeyByDepth = filterKeyByDepth;
|
||||
exports.joinKeys = joinKeys;
|
||||
exports.normalizeBaseKey = normalizeBaseKey;
|
||||
exports.normalizeKey = normalizeKey;
|
||||
exports.prefixStorage = prefixStorage;
|
||||
exports.serializeRaw = serializeRaw;
|
||||
exports.stringify = stringify;
|
111
node_modules/unstorage/drivers/azure-app-configuration.cjs
generated
vendored
Normal file
111
node_modules/unstorage/drivers/azure-app-configuration.cjs
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _appConfiguration = require("@azure/app-configuration");
|
||||
var _identity = require("@azure/identity");
|
||||
const DRIVER_NAME = "azure-app-configuration";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const labelFilter = opts.label || "\0";
|
||||
const keyFilter = opts.prefix ? `${opts.prefix}:*` : "*";
|
||||
const p = key => opts.prefix ? `${opts.prefix}:${key}` : key;
|
||||
const d = key => opts.prefix ? key.replace(opts.prefix, "") : key;
|
||||
let client;
|
||||
const getClient = () => {
|
||||
if (client) {
|
||||
return client;
|
||||
}
|
||||
if (!opts.endpoint && !opts.appConfigName && !opts.connectionString) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, ["endpoint", "appConfigName", "connectionString"]);
|
||||
}
|
||||
const appConfigEndpoint = opts.endpoint || `https://${opts.appConfigName}.azconfig.io`;
|
||||
if (opts.connectionString) {
|
||||
client = new _appConfiguration.AppConfigurationClient(opts.connectionString);
|
||||
} else {
|
||||
const credential = new _identity.DefaultAzureCredential();
|
||||
client = new _appConfiguration.AppConfigurationClient(appConfigEndpoint, credential);
|
||||
}
|
||||
return client;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getClient,
|
||||
async hasItem(key) {
|
||||
try {
|
||||
await getClient().getConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const setting = await getClient().getConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return setting.value;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await getClient().setConfigurationSetting({
|
||||
key: p(key),
|
||||
value,
|
||||
label: opts.label
|
||||
});
|
||||
return;
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getClient().deleteConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return;
|
||||
},
|
||||
async getKeys() {
|
||||
const settings = getClient().listConfigurationSettings({
|
||||
keyFilter,
|
||||
labelFilter,
|
||||
fields: ["key", "value", "label"]
|
||||
});
|
||||
const keys = [];
|
||||
for await (const setting of settings) {
|
||||
keys.push(d(setting.key));
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const setting = await getClient().getConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return {
|
||||
mtime: setting.lastModified,
|
||||
etag: setting.etag,
|
||||
tags: setting.tags
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const settings = getClient().listConfigurationSettings({
|
||||
keyFilter,
|
||||
labelFilter,
|
||||
fields: ["key", "value", "label"]
|
||||
});
|
||||
for await (const setting of settings) {
|
||||
await getClient().deleteConfigurationSetting({
|
||||
key: setting.key,
|
||||
label: setting.label
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
30
node_modules/unstorage/drivers/azure-app-configuration.d.ts
generated
vendored
Normal file
30
node_modules/unstorage/drivers/azure-app-configuration.d.ts
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
import { AppConfigurationClient } from "@azure/app-configuration";
|
||||
export interface AzureAppConfigurationOptions {
|
||||
/**
|
||||
* Optional prefix for keys. This can be used to isolate keys from different applications in the same Azure App Configuration instance. E.g. "app01" results in keys like "app01:foo" and "app01:bar".
|
||||
* @default null
|
||||
*/
|
||||
prefix?: string;
|
||||
/**
|
||||
* Optional label for keys. If not provided, all keys will be created and listed without labels. This can be used to isolate keys from different environments in the same Azure App Configuration instance. E.g. "dev" results in keys like "foo" and "bar" with the label "dev".
|
||||
* @default '\0'
|
||||
*/
|
||||
label?: string;
|
||||
/**
|
||||
* Optional endpoint to use when connecting to Azure App Configuration. If not provided, the appConfigName option must be provided. If both are provided, the endpoint option takes precedence.
|
||||
* @default null
|
||||
*/
|
||||
endpoint?: string;
|
||||
/**
|
||||
* Optional name of the Azure App Configuration instance to connect to. If not provided, the endpoint option must be provided. If both are provided, the endpoint option takes precedence.
|
||||
* @default null
|
||||
*/
|
||||
appConfigName?: string;
|
||||
/**
|
||||
* Optional connection string to use when connecting to Azure App Configuration. If not provided, the endpoint option must be provided. If both are provided, the endpoint option takes precedence.
|
||||
* @default null
|
||||
*/
|
||||
connectionString?: string;
|
||||
}
|
||||
declare const _default: (opts: AzureAppConfigurationOptions | undefined) => import("..").Driver<AzureAppConfigurationOptions | undefined, AppConfigurationClient>;
|
||||
export default _default;
|
109
node_modules/unstorage/drivers/azure-app-configuration.mjs
generated
vendored
Normal file
109
node_modules/unstorage/drivers/azure-app-configuration.mjs
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
import { defineDriver, createRequiredError } from "./utils/index.mjs";
|
||||
import { AppConfigurationClient } from "@azure/app-configuration";
|
||||
import { DefaultAzureCredential } from "@azure/identity";
|
||||
const DRIVER_NAME = "azure-app-configuration";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const labelFilter = opts.label || "\0";
|
||||
const keyFilter = opts.prefix ? `${opts.prefix}:*` : "*";
|
||||
const p = (key) => opts.prefix ? `${opts.prefix}:${key}` : key;
|
||||
const d = (key) => opts.prefix ? key.replace(opts.prefix, "") : key;
|
||||
let client;
|
||||
const getClient = () => {
|
||||
if (client) {
|
||||
return client;
|
||||
}
|
||||
if (!opts.endpoint && !opts.appConfigName && !opts.connectionString) {
|
||||
throw createRequiredError(DRIVER_NAME, [
|
||||
"endpoint",
|
||||
"appConfigName",
|
||||
"connectionString"
|
||||
]);
|
||||
}
|
||||
const appConfigEndpoint = opts.endpoint || `https://${opts.appConfigName}.azconfig.io`;
|
||||
if (opts.connectionString) {
|
||||
client = new AppConfigurationClient(opts.connectionString);
|
||||
} else {
|
||||
const credential = new DefaultAzureCredential();
|
||||
client = new AppConfigurationClient(appConfigEndpoint, credential);
|
||||
}
|
||||
return client;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getClient,
|
||||
async hasItem(key) {
|
||||
try {
|
||||
await getClient().getConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const setting = await getClient().getConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return setting.value;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await getClient().setConfigurationSetting({
|
||||
key: p(key),
|
||||
value,
|
||||
label: opts.label
|
||||
});
|
||||
return;
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getClient().deleteConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return;
|
||||
},
|
||||
async getKeys() {
|
||||
const settings = getClient().listConfigurationSettings({
|
||||
keyFilter,
|
||||
labelFilter,
|
||||
fields: ["key", "value", "label"]
|
||||
});
|
||||
const keys = [];
|
||||
for await (const setting of settings) {
|
||||
keys.push(d(setting.key));
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const setting = await getClient().getConfigurationSetting({
|
||||
key: p(key),
|
||||
label: opts.label
|
||||
});
|
||||
return {
|
||||
mtime: setting.lastModified,
|
||||
etag: setting.etag,
|
||||
tags: setting.tags
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const settings = getClient().listConfigurationSettings({
|
||||
keyFilter,
|
||||
labelFilter,
|
||||
fields: ["key", "value", "label"]
|
||||
});
|
||||
for await (const setting of settings) {
|
||||
await getClient().deleteConfigurationSetting({
|
||||
key: setting.key,
|
||||
label: setting.label
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
103
node_modules/unstorage/drivers/azure-cosmos.cjs
generated
vendored
Normal file
103
node_modules/unstorage/drivers/azure-cosmos.cjs
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _cosmos = require("@azure/cosmos");
|
||||
var _identity = require("@azure/identity");
|
||||
const DRIVER_NAME = "azure-cosmos";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
let client;
|
||||
const getCosmosClient = async () => {
|
||||
if (client) {
|
||||
return client;
|
||||
}
|
||||
if (!opts.endpoint) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "endpoint");
|
||||
}
|
||||
if (opts.accountKey) {
|
||||
const cosmosClient = new _cosmos.CosmosClient({
|
||||
endpoint: opts.endpoint,
|
||||
key: opts.accountKey
|
||||
});
|
||||
const {
|
||||
database
|
||||
} = await cosmosClient.databases.createIfNotExists({
|
||||
id: opts.databaseName || "unstorage"
|
||||
});
|
||||
const {
|
||||
container
|
||||
} = await database.containers.createIfNotExists({
|
||||
id: opts.containerName || "unstorage"
|
||||
});
|
||||
client = container;
|
||||
} else {
|
||||
const credential = new _identity.DefaultAzureCredential();
|
||||
const cosmosClient = new _cosmos.CosmosClient({
|
||||
endpoint: opts.endpoint,
|
||||
aadCredentials: credential
|
||||
});
|
||||
const {
|
||||
database
|
||||
} = await cosmosClient.databases.createIfNotExists({
|
||||
id: opts.databaseName || "unstorage"
|
||||
});
|
||||
const {
|
||||
container
|
||||
} = await database.containers.createIfNotExists({
|
||||
id: opts.containerName || "unstorage"
|
||||
});
|
||||
client = container;
|
||||
}
|
||||
return client;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getCosmosClient,
|
||||
async hasItem(key) {
|
||||
const item = await (await getCosmosClient()).item(key).read();
|
||||
return item.resource ? true : false;
|
||||
},
|
||||
async getItem(key) {
|
||||
const item = await (await getCosmosClient()).item(key).read();
|
||||
return item.resource ? item.resource.value : null;
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const modified = /* @__PURE__ */new Date();
|
||||
await (await getCosmosClient()).items.upsert({
|
||||
id: key,
|
||||
value,
|
||||
modified
|
||||
}, {
|
||||
consistencyLevel: "Session"
|
||||
});
|
||||
},
|
||||
async removeItem(key) {
|
||||
await (await getCosmosClient()).item(key).delete({
|
||||
consistencyLevel: "Session"
|
||||
});
|
||||
},
|
||||
async getKeys() {
|
||||
const iterator = (await getCosmosClient()).items.query(`SELECT { id } from c`);
|
||||
return (await iterator.fetchAll()).resources.map(item => item.id);
|
||||
},
|
||||
async getMeta(key) {
|
||||
const item = await (await getCosmosClient()).item(key).read();
|
||||
return {
|
||||
mtime: item.resource?.modified ? new Date(item.resource.modified) : void 0
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const iterator = (await getCosmosClient()).items.query(`SELECT { id } from c`);
|
||||
const items = (await iterator.fetchAll()).resources;
|
||||
for (const item of items) {
|
||||
await (await getCosmosClient()).item(item.id).delete({
|
||||
consistencyLevel: "Session"
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
37
node_modules/unstorage/drivers/azure-cosmos.d.ts
generated
vendored
Normal file
37
node_modules/unstorage/drivers/azure-cosmos.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import { Container } from "@azure/cosmos";
|
||||
export interface AzureCosmosOptions {
|
||||
/**
|
||||
* CosmosDB endpoint in the format of https://<account>.documents.azure.com:443/.
|
||||
*/
|
||||
endpoint: string;
|
||||
/**
|
||||
* CosmosDB account key. If not provided, the driver will use the DefaultAzureCredential (recommended).
|
||||
*/
|
||||
accountKey?: string;
|
||||
/**
|
||||
* The name of the database to use. Defaults to `unstorage`.
|
||||
* @default "unstorage"
|
||||
*/
|
||||
databaseName?: string;
|
||||
/**
|
||||
* The name of the container to use. Defaults to `unstorage`.
|
||||
* @default "unstorage"
|
||||
*/
|
||||
containerName?: string;
|
||||
}
|
||||
export interface AzureCosmosItem {
|
||||
/**
|
||||
* The unstorage key as id of the item.
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* The unstorage value of the item.
|
||||
*/
|
||||
value: string;
|
||||
/**
|
||||
* The unstorage mtime metadata of the item.
|
||||
*/
|
||||
modified: string | Date;
|
||||
}
|
||||
declare const _default: (opts: AzureCosmosOptions) => import("..").Driver<AzureCosmosOptions, Promise<Container>>;
|
||||
export default _default;
|
86
node_modules/unstorage/drivers/azure-cosmos.mjs
generated
vendored
Normal file
86
node_modules/unstorage/drivers/azure-cosmos.mjs
generated
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
import { createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import { CosmosClient } from "@azure/cosmos";
|
||||
import { DefaultAzureCredential } from "@azure/identity";
|
||||
const DRIVER_NAME = "azure-cosmos";
|
||||
export default defineDriver((opts) => {
|
||||
let client;
|
||||
const getCosmosClient = async () => {
|
||||
if (client) {
|
||||
return client;
|
||||
}
|
||||
if (!opts.endpoint) {
|
||||
throw createRequiredError(DRIVER_NAME, "endpoint");
|
||||
}
|
||||
if (opts.accountKey) {
|
||||
const cosmosClient = new CosmosClient({
|
||||
endpoint: opts.endpoint,
|
||||
key: opts.accountKey
|
||||
});
|
||||
const { database } = await cosmosClient.databases.createIfNotExists({
|
||||
id: opts.databaseName || "unstorage"
|
||||
});
|
||||
const { container } = await database.containers.createIfNotExists({
|
||||
id: opts.containerName || "unstorage"
|
||||
});
|
||||
client = container;
|
||||
} else {
|
||||
const credential = new DefaultAzureCredential();
|
||||
const cosmosClient = new CosmosClient({
|
||||
endpoint: opts.endpoint,
|
||||
aadCredentials: credential
|
||||
});
|
||||
const { database } = await cosmosClient.databases.createIfNotExists({
|
||||
id: opts.databaseName || "unstorage"
|
||||
});
|
||||
const { container } = await database.containers.createIfNotExists({
|
||||
id: opts.containerName || "unstorage"
|
||||
});
|
||||
client = container;
|
||||
}
|
||||
return client;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getCosmosClient,
|
||||
async hasItem(key) {
|
||||
const item = await (await getCosmosClient()).item(key).read();
|
||||
return item.resource ? true : false;
|
||||
},
|
||||
async getItem(key) {
|
||||
const item = await (await getCosmosClient()).item(key).read();
|
||||
return item.resource ? item.resource.value : null;
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const modified = /* @__PURE__ */ new Date();
|
||||
await (await getCosmosClient()).items.upsert(
|
||||
{ id: key, value, modified },
|
||||
{ consistencyLevel: "Session" }
|
||||
);
|
||||
},
|
||||
async removeItem(key) {
|
||||
await (await getCosmosClient()).item(key).delete({ consistencyLevel: "Session" });
|
||||
},
|
||||
async getKeys() {
|
||||
const iterator = (await getCosmosClient()).items.query(
|
||||
`SELECT { id } from c`
|
||||
);
|
||||
return (await iterator.fetchAll()).resources.map((item) => item.id);
|
||||
},
|
||||
async getMeta(key) {
|
||||
const item = await (await getCosmosClient()).item(key).read();
|
||||
return {
|
||||
mtime: item.resource?.modified ? new Date(item.resource.modified) : void 0
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const iterator = (await getCosmosClient()).items.query(
|
||||
`SELECT { id } from c`
|
||||
);
|
||||
const items = (await iterator.fetchAll()).resources;
|
||||
for (const item of items) {
|
||||
await (await getCosmosClient()).item(item.id).delete({ consistencyLevel: "Session" });
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
116
node_modules/unstorage/drivers/azure-key-vault.cjs
generated
vendored
Normal file
116
node_modules/unstorage/drivers/azure-key-vault.cjs
generated
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _keyvaultSecrets = require("@azure/keyvault-secrets");
|
||||
var _identity = require("@azure/identity");
|
||||
const DRIVER_NAME = "azure-key-vault";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
let keyVaultClient;
|
||||
const getKeyVaultClient = () => {
|
||||
if (keyVaultClient) {
|
||||
return keyVaultClient;
|
||||
}
|
||||
const {
|
||||
vaultName = null,
|
||||
serviceVersion = "7.3",
|
||||
pageSize = 25
|
||||
} = opts;
|
||||
if (!vaultName) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "vaultName");
|
||||
}
|
||||
if (pageSize > 25) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, "`pageSize` cannot be greater than `25`");
|
||||
}
|
||||
const credential = new _identity.DefaultAzureCredential();
|
||||
const url = `https://${vaultName}.vault.azure.net`;
|
||||
keyVaultClient = new _keyvaultSecrets.SecretClient(url, credential, {
|
||||
serviceVersion
|
||||
});
|
||||
return keyVaultClient;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getKeyVaultClient,
|
||||
async hasItem(key) {
|
||||
try {
|
||||
await getKeyVaultClient().getSecret(encode(key));
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const secret = await getKeyVaultClient().getSecret(encode(key));
|
||||
return secret.value;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await getKeyVaultClient().setSecret(encode(key), value);
|
||||
},
|
||||
async removeItem(key) {
|
||||
const poller = await getKeyVaultClient().beginDeleteSecret(encode(key));
|
||||
await poller.pollUntilDone();
|
||||
await getKeyVaultClient().purgeDeletedSecret(encode(key));
|
||||
},
|
||||
async getKeys() {
|
||||
const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({
|
||||
maxPageSize: opts.pageSize || 25
|
||||
});
|
||||
const keys = [];
|
||||
for await (const page of secrets) {
|
||||
const pageKeys = page.map(secret => decode(secret.name));
|
||||
keys.push(...pageKeys);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const secret = await getKeyVaultClient().getSecret(encode(key));
|
||||
return {
|
||||
mtime: secret.properties.updatedOn,
|
||||
birthtime: secret.properties.createdOn,
|
||||
expireTime: secret.properties.expiresOn
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({
|
||||
maxPageSize: opts.pageSize || 25
|
||||
});
|
||||
for await (const page of secrets) {
|
||||
const deletionPromises = page.map(async secret => {
|
||||
const poller = await getKeyVaultClient().beginDeleteSecret(secret.name);
|
||||
await poller.pollUntilDone();
|
||||
await getKeyVaultClient().purgeDeletedSecret(secret.name);
|
||||
});
|
||||
await Promise.all(deletionPromises);
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
const base64Map = {
|
||||
"=": "-e-",
|
||||
"+": "-p-",
|
||||
"/": "-s-"
|
||||
};
|
||||
function encode(value) {
|
||||
let encoded = Buffer.from(value).toString("base64");
|
||||
for (const key in base64Map) {
|
||||
encoded = encoded.replace(new RegExp(key.replace(/[$()*+.?[\\\]^{|}]/g, "\\$&"), "g"), base64Map[key]);
|
||||
}
|
||||
return encoded;
|
||||
}
|
||||
function decode(value) {
|
||||
let decoded = value;
|
||||
const search = new RegExp(Object.values(base64Map).join("|"), "g");
|
||||
decoded = decoded.replace(search, match => {
|
||||
return Object.keys(base64Map).find(key => base64Map[key] === match);
|
||||
});
|
||||
return Buffer.from(decoded, "base64").toString();
|
||||
}
|
19
node_modules/unstorage/drivers/azure-key-vault.d.ts
generated
vendored
Normal file
19
node_modules/unstorage/drivers/azure-key-vault.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { SecretClient, type SecretClientOptions } from "@azure/keyvault-secrets";
|
||||
export interface AzureKeyVaultOptions {
|
||||
/**
|
||||
* The name of the key vault to use.
|
||||
*/
|
||||
vaultName: string;
|
||||
/**
|
||||
* Version of the Azure Key Vault service to use. Defaults to 7.3.
|
||||
* @default '7.3'
|
||||
*/
|
||||
serviceVersion?: SecretClientOptions["serviceVersion"];
|
||||
/**
|
||||
* The number of entries to retrieve per request. Impacts getKeys() and clear() performance. Maximum value is 25.
|
||||
* @default 25
|
||||
*/
|
||||
pageSize?: number;
|
||||
}
|
||||
declare const _default: (opts: AzureKeyVaultOptions) => import("..").Driver<AzureKeyVaultOptions, SecretClient>;
|
||||
export default _default;
|
107
node_modules/unstorage/drivers/azure-key-vault.mjs
generated
vendored
Normal file
107
node_modules/unstorage/drivers/azure-key-vault.mjs
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import {
|
||||
SecretClient
|
||||
} from "@azure/keyvault-secrets";
|
||||
import { DefaultAzureCredential } from "@azure/identity";
|
||||
const DRIVER_NAME = "azure-key-vault";
|
||||
export default defineDriver((opts) => {
|
||||
let keyVaultClient;
|
||||
const getKeyVaultClient = () => {
|
||||
if (keyVaultClient) {
|
||||
return keyVaultClient;
|
||||
}
|
||||
const { vaultName = null, serviceVersion = "7.3", pageSize = 25 } = opts;
|
||||
if (!vaultName) {
|
||||
throw createRequiredError(DRIVER_NAME, "vaultName");
|
||||
}
|
||||
if (pageSize > 25) {
|
||||
throw createError(DRIVER_NAME, "`pageSize` cannot be greater than `25`");
|
||||
}
|
||||
const credential = new DefaultAzureCredential();
|
||||
const url = `https://${vaultName}.vault.azure.net`;
|
||||
keyVaultClient = new SecretClient(url, credential, { serviceVersion });
|
||||
return keyVaultClient;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getKeyVaultClient,
|
||||
async hasItem(key) {
|
||||
try {
|
||||
await getKeyVaultClient().getSecret(encode(key));
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const secret = await getKeyVaultClient().getSecret(encode(key));
|
||||
return secret.value;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await getKeyVaultClient().setSecret(encode(key), value);
|
||||
},
|
||||
async removeItem(key) {
|
||||
const poller = await getKeyVaultClient().beginDeleteSecret(encode(key));
|
||||
await poller.pollUntilDone();
|
||||
await getKeyVaultClient().purgeDeletedSecret(encode(key));
|
||||
},
|
||||
async getKeys() {
|
||||
const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({ maxPageSize: opts.pageSize || 25 });
|
||||
const keys = [];
|
||||
for await (const page of secrets) {
|
||||
const pageKeys = page.map((secret) => decode(secret.name));
|
||||
keys.push(...pageKeys);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const secret = await getKeyVaultClient().getSecret(encode(key));
|
||||
return {
|
||||
mtime: secret.properties.updatedOn,
|
||||
birthtime: secret.properties.createdOn,
|
||||
expireTime: secret.properties.expiresOn
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({ maxPageSize: opts.pageSize || 25 });
|
||||
for await (const page of secrets) {
|
||||
const deletionPromises = page.map(async (secret) => {
|
||||
const poller = await getKeyVaultClient().beginDeleteSecret(
|
||||
secret.name
|
||||
);
|
||||
await poller.pollUntilDone();
|
||||
await getKeyVaultClient().purgeDeletedSecret(secret.name);
|
||||
});
|
||||
await Promise.all(deletionPromises);
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
const base64Map = {
|
||||
"=": "-e-",
|
||||
"+": "-p-",
|
||||
"/": "-s-"
|
||||
};
|
||||
function encode(value) {
|
||||
let encoded = Buffer.from(value).toString("base64");
|
||||
for (const key in base64Map) {
|
||||
encoded = encoded.replace(
|
||||
new RegExp(key.replace(/[$()*+.?[\\\]^{|}]/g, "\\$&"), "g"),
|
||||
base64Map[key]
|
||||
);
|
||||
}
|
||||
return encoded;
|
||||
}
|
||||
function decode(value) {
|
||||
let decoded = value;
|
||||
const search = new RegExp(Object.values(base64Map).join("|"), "g");
|
||||
decoded = decoded.replace(search, (match) => {
|
||||
return Object.keys(base64Map).find((key) => base64Map[key] === match);
|
||||
});
|
||||
return Buffer.from(decoded, "base64").toString();
|
||||
}
|
141
node_modules/unstorage/drivers/azure-storage-blob.cjs
generated
vendored
Normal file
141
node_modules/unstorage/drivers/azure-storage-blob.cjs
generated
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _storageBlob = require("@azure/storage-blob");
|
||||
var _identity = require("@azure/identity");
|
||||
const DRIVER_NAME = "azure-storage-blob";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
let containerClient;
|
||||
const endpointSuffix = opts.endpointSuffix || ".blob.core.windows.net";
|
||||
const getContainerClient = () => {
|
||||
if (containerClient) {
|
||||
return containerClient;
|
||||
}
|
||||
if (!opts.connectionString && !opts.sasUrl && !opts.accountName) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, "missing accountName");
|
||||
}
|
||||
let serviceClient;
|
||||
if (opts.accountKey) {
|
||||
const credential = new _storageBlob.StorageSharedKeyCredential(opts.accountName, opts.accountKey);
|
||||
serviceClient = new _storageBlob.BlobServiceClient(`https://${opts.accountName}${endpointSuffix}`, credential);
|
||||
} else if (opts.sasUrl) {
|
||||
if (opts.containerName && opts.sasUrl.includes(`${opts.containerName}?`)) {
|
||||
containerClient = new _storageBlob.ContainerClient(`${opts.sasUrl}`);
|
||||
return containerClient;
|
||||
}
|
||||
serviceClient = new _storageBlob.BlobServiceClient(opts.sasUrl);
|
||||
} else if (opts.sasKey) {
|
||||
if (opts.containerName) {
|
||||
containerClient = new _storageBlob.ContainerClient(`https://${opts.accountName}${endpointSuffix}/${opts.containerName}?${opts.sasKey}`);
|
||||
return containerClient;
|
||||
}
|
||||
serviceClient = new _storageBlob.BlobServiceClient(`https://${opts.accountName}${endpointSuffix}?${opts.sasKey}`);
|
||||
} else if (opts.connectionString) {
|
||||
serviceClient = _storageBlob.BlobServiceClient.fromConnectionString(opts.connectionString);
|
||||
} else {
|
||||
const credential = new _identity.DefaultAzureCredential();
|
||||
serviceClient = new _storageBlob.BlobServiceClient(`https://${opts.accountName}${endpointSuffix}`, credential);
|
||||
}
|
||||
containerClient = serviceClient.getContainerClient(opts.containerName || "unstorage");
|
||||
containerClient.createIfNotExists();
|
||||
return containerClient;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getContainerClient,
|
||||
async hasItem(key) {
|
||||
return await getContainerClient().getBlockBlobClient(key).exists();
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const blob = await getContainerClient().getBlockBlobClient(key).download();
|
||||
if (isBrowser) {
|
||||
return blob.blobBody ? await blobToString(await blob.blobBody) : null;
|
||||
}
|
||||
return blob.readableStreamBody ? (await streamToBuffer(blob.readableStreamBody)).toString() : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async getItemRaw(key) {
|
||||
try {
|
||||
const blob = await getContainerClient().getBlockBlobClient(key).download();
|
||||
if (isBrowser) {
|
||||
return blob.blobBody ? await blobToString(await blob.blobBody) : null;
|
||||
}
|
||||
return blob.readableStreamBody ? await streamToBuffer(blob.readableStreamBody) : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
|
||||
},
|
||||
async setItemRaw(key, value) {
|
||||
await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getContainerClient().getBlockBlobClient(key).deleteIfExists({
|
||||
deleteSnapshots: "include"
|
||||
});
|
||||
},
|
||||
async getKeys() {
|
||||
const iterator = getContainerClient().listBlobsFlat().byPage({
|
||||
maxPageSize: 1e3
|
||||
});
|
||||
const keys = [];
|
||||
for await (const page of iterator) {
|
||||
const pageKeys = page.segment.blobItems.map(blob => blob.name);
|
||||
keys.push(...pageKeys);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const blobProperties = await getContainerClient().getBlockBlobClient(key).getProperties();
|
||||
return {
|
||||
mtime: blobProperties.lastModified,
|
||||
atime: blobProperties.lastAccessed,
|
||||
cr: blobProperties.createdOn,
|
||||
...blobProperties.metadata
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const iterator = getContainerClient().listBlobsFlat().byPage({
|
||||
maxPageSize: 1e3
|
||||
});
|
||||
for await (const page of iterator) {
|
||||
await Promise.all(page.segment.blobItems.map(async blob => await getContainerClient().deleteBlob(blob.name, {
|
||||
deleteSnapshots: "include"
|
||||
})));
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
const isBrowser = typeof window !== "undefined";
|
||||
async function streamToBuffer(readableStream) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const chunks = [];
|
||||
readableStream.on("data", data => {
|
||||
chunks.push(data instanceof Buffer ? data : Buffer.from(data));
|
||||
});
|
||||
readableStream.on("end", () => {
|
||||
resolve(Buffer.concat(chunks));
|
||||
});
|
||||
readableStream.on("error", reject);
|
||||
});
|
||||
}
|
||||
async function blobToString(blob) {
|
||||
const fileReader = new FileReader();
|
||||
return new Promise((resolve, reject) => {
|
||||
fileReader.onloadend = ev => {
|
||||
resolve(ev.target?.result);
|
||||
};
|
||||
fileReader.onerror = reject;
|
||||
fileReader.readAsText(blob);
|
||||
});
|
||||
}
|
35
node_modules/unstorage/drivers/azure-storage-blob.d.ts
generated
vendored
Normal file
35
node_modules/unstorage/drivers/azure-storage-blob.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import { ContainerClient } from "@azure/storage-blob";
|
||||
export interface AzureStorageBlobOptions {
|
||||
/**
|
||||
* The name of the Azure Storage account.
|
||||
*/
|
||||
accountName?: string;
|
||||
/**
|
||||
* The name of the storage container. All entities will be stored in the same container. Will be created if it doesn't exist.
|
||||
* @default "unstorage"
|
||||
*/
|
||||
containerName?: string;
|
||||
/**
|
||||
* The account key. If provided, the SAS key will be ignored. Only available in Node.js runtime.
|
||||
*/
|
||||
accountKey?: string;
|
||||
/**
|
||||
* The SAS token. If provided, the account key will be ignored. Include at least read, list and write permissions to be able to list keys.
|
||||
*/
|
||||
sasKey?: string;
|
||||
/**
|
||||
* The SAS URL. If provided, the account key, SAS key and container name will be ignored.
|
||||
*/
|
||||
sasUrl?: string;
|
||||
/**
|
||||
* The connection string. If provided, the account key and SAS key will be ignored. Only available in Node.js runtime.
|
||||
*/
|
||||
connectionString?: string;
|
||||
/**
|
||||
* Storage account endpoint suffix. Need to be changed for Microsoft Azure operated by 21Vianet, Azure Government or Azurite.
|
||||
* @default ".blob.core.windows.net"
|
||||
*/
|
||||
endpointSuffix?: string;
|
||||
}
|
||||
declare const _default: (opts: AzureStorageBlobOptions) => import("..").Driver<AzureStorageBlobOptions, ContainerClient>;
|
||||
export default _default;
|
154
node_modules/unstorage/drivers/azure-storage-blob.mjs
generated
vendored
Normal file
154
node_modules/unstorage/drivers/azure-storage-blob.mjs
generated
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
import { createError, defineDriver } from "./utils/index.mjs";
|
||||
import {
|
||||
BlobServiceClient,
|
||||
ContainerClient,
|
||||
StorageSharedKeyCredential
|
||||
} from "@azure/storage-blob";
|
||||
import { DefaultAzureCredential } from "@azure/identity";
|
||||
const DRIVER_NAME = "azure-storage-blob";
|
||||
export default defineDriver((opts) => {
|
||||
let containerClient;
|
||||
const endpointSuffix = opts.endpointSuffix || ".blob.core.windows.net";
|
||||
const getContainerClient = () => {
|
||||
if (containerClient) {
|
||||
return containerClient;
|
||||
}
|
||||
if (!opts.connectionString && !opts.sasUrl && !opts.accountName) {
|
||||
throw createError(DRIVER_NAME, "missing accountName");
|
||||
}
|
||||
let serviceClient;
|
||||
if (opts.accountKey) {
|
||||
const credential = new StorageSharedKeyCredential(
|
||||
opts.accountName,
|
||||
opts.accountKey
|
||||
);
|
||||
serviceClient = new BlobServiceClient(
|
||||
`https://${opts.accountName}${endpointSuffix}`,
|
||||
credential
|
||||
);
|
||||
} else if (opts.sasUrl) {
|
||||
if (opts.containerName && opts.sasUrl.includes(`${opts.containerName}?`)) {
|
||||
containerClient = new ContainerClient(`${opts.sasUrl}`);
|
||||
return containerClient;
|
||||
}
|
||||
serviceClient = new BlobServiceClient(opts.sasUrl);
|
||||
} else if (opts.sasKey) {
|
||||
if (opts.containerName) {
|
||||
containerClient = new ContainerClient(
|
||||
`https://${opts.accountName}${endpointSuffix}/${opts.containerName}?${opts.sasKey}`
|
||||
);
|
||||
return containerClient;
|
||||
}
|
||||
serviceClient = new BlobServiceClient(
|
||||
`https://${opts.accountName}${endpointSuffix}?${opts.sasKey}`
|
||||
);
|
||||
} else if (opts.connectionString) {
|
||||
serviceClient = BlobServiceClient.fromConnectionString(
|
||||
opts.connectionString
|
||||
);
|
||||
} else {
|
||||
const credential = new DefaultAzureCredential();
|
||||
serviceClient = new BlobServiceClient(
|
||||
`https://${opts.accountName}${endpointSuffix}`,
|
||||
credential
|
||||
);
|
||||
}
|
||||
containerClient = serviceClient.getContainerClient(
|
||||
opts.containerName || "unstorage"
|
||||
);
|
||||
containerClient.createIfNotExists();
|
||||
return containerClient;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getContainerClient,
|
||||
async hasItem(key) {
|
||||
return await getContainerClient().getBlockBlobClient(key).exists();
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const blob = await getContainerClient().getBlockBlobClient(key).download();
|
||||
if (isBrowser) {
|
||||
return blob.blobBody ? await blobToString(await blob.blobBody) : null;
|
||||
}
|
||||
return blob.readableStreamBody ? (await streamToBuffer(blob.readableStreamBody)).toString() : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async getItemRaw(key) {
|
||||
try {
|
||||
const blob = await getContainerClient().getBlockBlobClient(key).download();
|
||||
if (isBrowser) {
|
||||
return blob.blobBody ? await blobToString(await blob.blobBody) : null;
|
||||
}
|
||||
return blob.readableStreamBody ? await streamToBuffer(blob.readableStreamBody) : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
|
||||
},
|
||||
async setItemRaw(key, value) {
|
||||
await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getContainerClient().getBlockBlobClient(key).deleteIfExists({ deleteSnapshots: "include" });
|
||||
},
|
||||
async getKeys() {
|
||||
const iterator = getContainerClient().listBlobsFlat().byPage({ maxPageSize: 1e3 });
|
||||
const keys = [];
|
||||
for await (const page of iterator) {
|
||||
const pageKeys = page.segment.blobItems.map((blob) => blob.name);
|
||||
keys.push(...pageKeys);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const blobProperties = await getContainerClient().getBlockBlobClient(key).getProperties();
|
||||
return {
|
||||
mtime: blobProperties.lastModified,
|
||||
atime: blobProperties.lastAccessed,
|
||||
cr: blobProperties.createdOn,
|
||||
...blobProperties.metadata
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const iterator = getContainerClient().listBlobsFlat().byPage({ maxPageSize: 1e3 });
|
||||
for await (const page of iterator) {
|
||||
await Promise.all(
|
||||
page.segment.blobItems.map(
|
||||
async (blob) => await getContainerClient().deleteBlob(blob.name, {
|
||||
deleteSnapshots: "include"
|
||||
})
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
||||
const isBrowser = typeof window !== "undefined";
|
||||
async function streamToBuffer(readableStream) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const chunks = [];
|
||||
readableStream.on("data", (data) => {
|
||||
chunks.push(data instanceof Buffer ? data : Buffer.from(data));
|
||||
});
|
||||
readableStream.on("end", () => {
|
||||
resolve(Buffer.concat(chunks));
|
||||
});
|
||||
readableStream.on("error", reject);
|
||||
});
|
||||
}
|
||||
async function blobToString(blob) {
|
||||
const fileReader = new FileReader();
|
||||
return new Promise((resolve, reject) => {
|
||||
fileReader.onloadend = (ev) => {
|
||||
resolve(ev.target?.result);
|
||||
};
|
||||
fileReader.onerror = reject;
|
||||
fileReader.readAsText(blob);
|
||||
});
|
||||
}
|
108
node_modules/unstorage/drivers/azure-storage-table.cjs
generated
vendored
Normal file
108
node_modules/unstorage/drivers/azure-storage-table.cjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _dataTables = require("@azure/data-tables");
|
||||
var _identity = require("@azure/identity");
|
||||
const DRIVER_NAME = "azure-storage-table";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
const {
|
||||
accountName = null,
|
||||
tableName = "unstorage",
|
||||
partitionKey = "unstorage",
|
||||
accountKey = null,
|
||||
sasKey = null,
|
||||
connectionString = null,
|
||||
pageSize = 1e3
|
||||
} = opts;
|
||||
let client;
|
||||
const getClient = () => {
|
||||
if (client) {
|
||||
return client;
|
||||
}
|
||||
if (!accountName) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "accountName");
|
||||
}
|
||||
if (pageSize > 1e3) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, "`pageSize` exceeds the maximum allowed value of `1000`");
|
||||
}
|
||||
if (accountKey) {
|
||||
const credential = new _dataTables.AzureNamedKeyCredential(accountName, accountKey);
|
||||
client = new _dataTables.TableClient(`https://${accountName}.table.core.windows.net`, tableName, credential);
|
||||
} else if (sasKey) {
|
||||
const credential = new _dataTables.AzureSASCredential(sasKey);
|
||||
client = new _dataTables.TableClient(`https://${accountName}.table.core.windows.net`, tableName, credential);
|
||||
} else if (connectionString) {
|
||||
client = _dataTables.TableClient.fromConnectionString(connectionString, tableName);
|
||||
} else {
|
||||
const credential = new _identity.DefaultAzureCredential();
|
||||
client = new _dataTables.TableClient(`https://${accountName}.table.core.windows.net`, tableName, credential);
|
||||
}
|
||||
return client;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getClient,
|
||||
async hasItem(key) {
|
||||
try {
|
||||
await getClient().getEntity(partitionKey, key);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const entity = await getClient().getEntity(partitionKey, key);
|
||||
return entity.unstorageValue;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const entity = {
|
||||
partitionKey,
|
||||
rowKey: key,
|
||||
unstorageValue: value
|
||||
};
|
||||
await getClient().upsertEntity(entity, "Replace");
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getClient().deleteEntity(partitionKey, key);
|
||||
},
|
||||
async getKeys() {
|
||||
const iterator = getClient().listEntities().byPage({
|
||||
maxPageSize: pageSize
|
||||
});
|
||||
const keys = [];
|
||||
for await (const page of iterator) {
|
||||
const pageKeys = page.map(entity => entity.rowKey).filter(Boolean);
|
||||
keys.push(...pageKeys);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const entity = await getClient().getEntity(partitionKey, key);
|
||||
return {
|
||||
mtime: entity.timestamp ? new Date(entity.timestamp) : void 0,
|
||||
etag: entity.etag
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const iterator = getClient().listEntities().byPage({
|
||||
maxPageSize: pageSize
|
||||
});
|
||||
for await (const page of iterator) {
|
||||
await Promise.all(page.map(async entity => {
|
||||
if (entity.partitionKey && entity.rowKey) {
|
||||
await getClient().deleteEntity(entity.partitionKey, entity.rowKey);
|
||||
}
|
||||
}));
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
36
node_modules/unstorage/drivers/azure-storage-table.d.ts
generated
vendored
Normal file
36
node_modules/unstorage/drivers/azure-storage-table.d.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
import { TableClient } from "@azure/data-tables";
|
||||
export interface AzureStorageTableOptions {
|
||||
/**
|
||||
* The name of the Azure Storage account.
|
||||
*/
|
||||
accountName: string;
|
||||
/**
|
||||
* The name of the table. All entities will be stored in the same table.
|
||||
* @default 'unstorage'
|
||||
*/
|
||||
tableName?: string;
|
||||
/**
|
||||
* The partition key. All entities will be stored in the same partition.
|
||||
* @default 'unstorage'
|
||||
*/
|
||||
partitionKey?: string;
|
||||
/**
|
||||
* The account key. If provided, the SAS key will be ignored. Only available in Node.js runtime.
|
||||
*/
|
||||
accountKey?: string;
|
||||
/**
|
||||
* The SAS key. If provided, the account key will be ignored.
|
||||
*/
|
||||
sasKey?: string;
|
||||
/**
|
||||
* The connection string. If provided, the account key and SAS key will be ignored. Only available in Node.js runtime.
|
||||
*/
|
||||
connectionString?: string;
|
||||
/**
|
||||
* The number of entries to retrive per request. Impacts getKeys() and clear() performance. Maximum value is 1000.
|
||||
* @default 1000
|
||||
*/
|
||||
pageSize?: number;
|
||||
}
|
||||
declare const _default: (opts: AzureStorageTableOptions) => import("..").Driver<AzureStorageTableOptions, TableClient>;
|
||||
export default _default;
|
122
node_modules/unstorage/drivers/azure-storage-table.mjs
generated
vendored
Normal file
122
node_modules/unstorage/drivers/azure-storage-table.mjs
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import {
|
||||
TableClient,
|
||||
AzureNamedKeyCredential,
|
||||
AzureSASCredential
|
||||
} from "@azure/data-tables";
|
||||
import { DefaultAzureCredential } from "@azure/identity";
|
||||
const DRIVER_NAME = "azure-storage-table";
|
||||
export default defineDriver((opts) => {
|
||||
const {
|
||||
accountName = null,
|
||||
tableName = "unstorage",
|
||||
partitionKey = "unstorage",
|
||||
accountKey = null,
|
||||
sasKey = null,
|
||||
connectionString = null,
|
||||
pageSize = 1e3
|
||||
} = opts;
|
||||
let client;
|
||||
const getClient = () => {
|
||||
if (client) {
|
||||
return client;
|
||||
}
|
||||
if (!accountName) {
|
||||
throw createRequiredError(DRIVER_NAME, "accountName");
|
||||
}
|
||||
if (pageSize > 1e3) {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
"`pageSize` exceeds the maximum allowed value of `1000`"
|
||||
);
|
||||
}
|
||||
if (accountKey) {
|
||||
const credential = new AzureNamedKeyCredential(accountName, accountKey);
|
||||
client = new TableClient(
|
||||
`https://${accountName}.table.core.windows.net`,
|
||||
tableName,
|
||||
credential
|
||||
);
|
||||
} else if (sasKey) {
|
||||
const credential = new AzureSASCredential(sasKey);
|
||||
client = new TableClient(
|
||||
`https://${accountName}.table.core.windows.net`,
|
||||
tableName,
|
||||
credential
|
||||
);
|
||||
} else if (connectionString) {
|
||||
client = TableClient.fromConnectionString(connectionString, tableName);
|
||||
} else {
|
||||
const credential = new DefaultAzureCredential();
|
||||
client = new TableClient(
|
||||
`https://${accountName}.table.core.windows.net`,
|
||||
tableName,
|
||||
credential
|
||||
);
|
||||
}
|
||||
return client;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getClient,
|
||||
async hasItem(key) {
|
||||
try {
|
||||
await getClient().getEntity(partitionKey, key);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
async getItem(key) {
|
||||
try {
|
||||
const entity = await getClient().getEntity(partitionKey, key);
|
||||
return entity.unstorageValue;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const entity = {
|
||||
partitionKey,
|
||||
rowKey: key,
|
||||
unstorageValue: value
|
||||
};
|
||||
await getClient().upsertEntity(entity, "Replace");
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getClient().deleteEntity(partitionKey, key);
|
||||
},
|
||||
async getKeys() {
|
||||
const iterator = getClient().listEntities().byPage({ maxPageSize: pageSize });
|
||||
const keys = [];
|
||||
for await (const page of iterator) {
|
||||
const pageKeys = page.map((entity) => entity.rowKey).filter(Boolean);
|
||||
keys.push(...pageKeys);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async getMeta(key) {
|
||||
const entity = await getClient().getEntity(partitionKey, key);
|
||||
return {
|
||||
mtime: entity.timestamp ? new Date(entity.timestamp) : void 0,
|
||||
etag: entity.etag
|
||||
};
|
||||
},
|
||||
async clear() {
|
||||
const iterator = getClient().listEntities().byPage({ maxPageSize: pageSize });
|
||||
for await (const page of iterator) {
|
||||
await Promise.all(
|
||||
page.map(async (entity) => {
|
||||
if (entity.partitionKey && entity.rowKey) {
|
||||
await getClient().deleteEntity(
|
||||
entity.partitionKey,
|
||||
entity.rowKey
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
63
node_modules/unstorage/drivers/capacitor-preferences.cjs
generated
vendored
Normal file
63
node_modules/unstorage/drivers/capacitor-preferences.cjs
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _preferences = require("@capacitor/preferences");
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "capacitor-preferences";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
const base = (0, _utils.normalizeKey)(opts?.base || "");
|
||||
const resolveKey = key => (0, _utils.joinKeys)(base, key);
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => _preferences.Preferences,
|
||||
hasItem(key) {
|
||||
return _preferences.Preferences.keys().then(r => r.keys.includes(resolveKey(key)));
|
||||
},
|
||||
getItem(key) {
|
||||
return _preferences.Preferences.get({
|
||||
key: resolveKey(key)
|
||||
}).then(r => r.value);
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return _preferences.Preferences.get({
|
||||
key: resolveKey(key)
|
||||
}).then(r => r.value);
|
||||
},
|
||||
setItem(key, value) {
|
||||
return _preferences.Preferences.set({
|
||||
key: resolveKey(key),
|
||||
value
|
||||
});
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
return _preferences.Preferences.set({
|
||||
key: resolveKey(key),
|
||||
value
|
||||
});
|
||||
},
|
||||
removeItem(key) {
|
||||
return _preferences.Preferences.remove({
|
||||
key: resolveKey(key)
|
||||
});
|
||||
},
|
||||
async getKeys() {
|
||||
const {
|
||||
keys
|
||||
} = await _preferences.Preferences.keys();
|
||||
return keys.map(key => key.slice(base.length));
|
||||
},
|
||||
async clear(prefix) {
|
||||
const {
|
||||
keys
|
||||
} = await _preferences.Preferences.keys();
|
||||
const _prefix = resolveKey(prefix || "");
|
||||
await Promise.all(keys.filter(key => key.startsWith(_prefix)).map(key => _preferences.Preferences.remove({
|
||||
key
|
||||
})));
|
||||
}
|
||||
};
|
||||
});
|
5
node_modules/unstorage/drivers/capacitor-preferences.d.ts
generated
vendored
Normal file
5
node_modules/unstorage/drivers/capacitor-preferences.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
export interface CapacitorPreferencesOptions {
|
||||
base?: string;
|
||||
}
|
||||
declare const _default: (opts: CapacitorPreferencesOptions) => import("..").Driver<CapacitorPreferencesOptions, import("@capacitor/preferences").PreferencesPlugin>;
|
||||
export default _default;
|
43
node_modules/unstorage/drivers/capacitor-preferences.mjs
generated
vendored
Normal file
43
node_modules/unstorage/drivers/capacitor-preferences.mjs
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import { Preferences } from "@capacitor/preferences";
|
||||
import { defineDriver, joinKeys, normalizeKey } from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "capacitor-preferences";
|
||||
export default defineDriver(
|
||||
(opts) => {
|
||||
const base = normalizeKey(opts?.base || "");
|
||||
const resolveKey = (key) => joinKeys(base, key);
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => Preferences,
|
||||
hasItem(key) {
|
||||
return Preferences.keys().then((r) => r.keys.includes(resolveKey(key)));
|
||||
},
|
||||
getItem(key) {
|
||||
return Preferences.get({ key: resolveKey(key) }).then((r) => r.value);
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return Preferences.get({ key: resolveKey(key) }).then((r) => r.value);
|
||||
},
|
||||
setItem(key, value) {
|
||||
return Preferences.set({ key: resolveKey(key), value });
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
return Preferences.set({ key: resolveKey(key), value });
|
||||
},
|
||||
removeItem(key) {
|
||||
return Preferences.remove({ key: resolveKey(key) });
|
||||
},
|
||||
async getKeys() {
|
||||
const { keys } = await Preferences.keys();
|
||||
return keys.map((key) => key.slice(base.length));
|
||||
},
|
||||
async clear(prefix) {
|
||||
const { keys } = await Preferences.keys();
|
||||
const _prefix = resolveKey(prefix || "");
|
||||
await Promise.all(
|
||||
keys.filter((key) => key.startsWith(_prefix)).map((key) => Preferences.remove({ key }))
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
);
|
63
node_modules/unstorage/drivers/cloudflare-kv-binding.cjs
generated
vendored
Normal file
63
node_modules/unstorage/drivers/cloudflare-kv-binding.cjs
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _cloudflare = require("./utils/cloudflare.cjs");
|
||||
const DRIVER_NAME = "cloudflare-kv-binding";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
const r = (key = "") => opts.base ? (0, _utils.joinKeys)(opts.base, key) : key;
|
||||
async function getKeys(base = "") {
|
||||
base = r(base);
|
||||
const binding = (0, _cloudflare.getKVBinding)(opts.binding);
|
||||
const keys = [];
|
||||
let cursor = void 0;
|
||||
do {
|
||||
const kvList = await binding.list({
|
||||
prefix: base || void 0,
|
||||
cursor
|
||||
});
|
||||
keys.push(...kvList.keys);
|
||||
cursor = kvList.list_complete ? void 0 : kvList.cursor;
|
||||
} while (cursor);
|
||||
return keys.map(key => key.name);
|
||||
}
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => (0, _cloudflare.getKVBinding)(opts.binding),
|
||||
async hasItem(key) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getKVBinding)(opts.binding);
|
||||
return (await binding.get(key)) !== null;
|
||||
},
|
||||
getItem(key) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getKVBinding)(opts.binding);
|
||||
return binding.get(key);
|
||||
},
|
||||
setItem(key, value, topts) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getKVBinding)(opts.binding);
|
||||
return binding.put(key, value, topts ? {
|
||||
expirationTtl: topts?.ttl ? Math.max(topts.ttl, opts.minTTL ?? 60) : void 0,
|
||||
...topts
|
||||
} : void 0);
|
||||
},
|
||||
removeItem(key) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getKVBinding)(opts.binding);
|
||||
return binding.delete(key);
|
||||
},
|
||||
getKeys(base) {
|
||||
return getKeys(base).then(keys => keys.map(key => opts.base ? key.slice(opts.base.length) : key));
|
||||
},
|
||||
async clear(base) {
|
||||
const binding = (0, _cloudflare.getKVBinding)(opts.binding);
|
||||
const keys = await getKeys(base);
|
||||
await Promise.all(keys.map(key => binding.delete(key)));
|
||||
}
|
||||
};
|
||||
});
|
12
node_modules/unstorage/drivers/cloudflare-kv-binding.d.ts
generated
vendored
Normal file
12
node_modules/unstorage/drivers/cloudflare-kv-binding.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
export interface KVOptions {
|
||||
binding?: string | KVNamespace;
|
||||
/** Adds prefix to all stored keys */
|
||||
base?: string;
|
||||
/**
|
||||
* The minimum time-to-live (ttl) for setItem in seconds.
|
||||
* The default is 60 seconds as per Cloudflare's [documentation](https://developers.cloudflare.com/kv/api/write-key-value-pairs/).
|
||||
*/
|
||||
minTTL?: number;
|
||||
}
|
||||
declare const _default: (opts: KVOptions) => import("..").Driver<KVOptions, KVNamespace<string>>;
|
||||
export default _default;
|
60
node_modules/unstorage/drivers/cloudflare-kv-binding.mjs
generated
vendored
Normal file
60
node_modules/unstorage/drivers/cloudflare-kv-binding.mjs
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
import { defineDriver, joinKeys } from "./utils/index.mjs";
|
||||
import { getKVBinding } from "./utils/cloudflare.mjs";
|
||||
const DRIVER_NAME = "cloudflare-kv-binding";
|
||||
export default defineDriver((opts) => {
|
||||
const r = (key = "") => opts.base ? joinKeys(opts.base, key) : key;
|
||||
async function getKeys(base = "") {
|
||||
base = r(base);
|
||||
const binding = getKVBinding(opts.binding);
|
||||
const keys = [];
|
||||
let cursor = void 0;
|
||||
do {
|
||||
const kvList = await binding.list({ prefix: base || void 0, cursor });
|
||||
keys.push(...kvList.keys);
|
||||
cursor = kvList.list_complete ? void 0 : kvList.cursor;
|
||||
} while (cursor);
|
||||
return keys.map((key) => key.name);
|
||||
}
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => getKVBinding(opts.binding),
|
||||
async hasItem(key) {
|
||||
key = r(key);
|
||||
const binding = getKVBinding(opts.binding);
|
||||
return await binding.get(key) !== null;
|
||||
},
|
||||
getItem(key) {
|
||||
key = r(key);
|
||||
const binding = getKVBinding(opts.binding);
|
||||
return binding.get(key);
|
||||
},
|
||||
setItem(key, value, topts) {
|
||||
key = r(key);
|
||||
const binding = getKVBinding(opts.binding);
|
||||
return binding.put(
|
||||
key,
|
||||
value,
|
||||
topts ? {
|
||||
expirationTtl: topts?.ttl ? Math.max(topts.ttl, opts.minTTL ?? 60) : void 0,
|
||||
...topts
|
||||
} : void 0
|
||||
);
|
||||
},
|
||||
removeItem(key) {
|
||||
key = r(key);
|
||||
const binding = getKVBinding(opts.binding);
|
||||
return binding.delete(key);
|
||||
},
|
||||
getKeys(base) {
|
||||
return getKeys(base).then(
|
||||
(keys) => keys.map((key) => opts.base ? key.slice(opts.base.length) : key)
|
||||
);
|
||||
},
|
||||
async clear(base) {
|
||||
const binding = getKVBinding(opts.binding);
|
||||
const keys = await getKeys(base);
|
||||
await Promise.all(keys.map((key) => binding.delete(key)));
|
||||
}
|
||||
};
|
||||
});
|
138
node_modules/unstorage/drivers/cloudflare-kv-http.cjs
generated
vendored
Normal file
138
node_modules/unstorage/drivers/cloudflare-kv-http.cjs
generated
vendored
Normal file
@@ -0,0 +1,138 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _ofetch = require("ofetch");
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "cloudflare-kv-http";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
if (!opts.accountId) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "accountId");
|
||||
}
|
||||
if (!opts.namespaceId) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "namespaceId");
|
||||
}
|
||||
let headers;
|
||||
if ("apiToken" in opts) {
|
||||
headers = {
|
||||
Authorization: `Bearer ${opts.apiToken}`
|
||||
};
|
||||
} else if ("userServiceKey" in opts) {
|
||||
headers = {
|
||||
"X-Auth-User-Service-Key": opts.userServiceKey
|
||||
};
|
||||
} else if (opts.email && opts.apiKey) {
|
||||
headers = {
|
||||
"X-Auth-Email": opts.email,
|
||||
"X-Auth-Key": opts.apiKey
|
||||
};
|
||||
} else {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, "One of the `apiToken`, `userServiceKey`, or a combination of `email` and `apiKey` is required.");
|
||||
}
|
||||
const apiURL = opts.apiURL || "https://api.cloudflare.com";
|
||||
const baseURL = `${apiURL}/client/v4/accounts/${opts.accountId}/storage/kv/namespaces/${opts.namespaceId}`;
|
||||
const kvFetch = _ofetch.$fetch.create({
|
||||
baseURL,
|
||||
headers
|
||||
});
|
||||
const r = (key = "") => opts.base ? (0, _utils.joinKeys)(opts.base, key) : key;
|
||||
const hasItem = async key => {
|
||||
try {
|
||||
const res = await kvFetch(`/metadata/${r(key)}`);
|
||||
return res?.success === true;
|
||||
} catch (err) {
|
||||
if (!err?.response) {
|
||||
throw err;
|
||||
}
|
||||
if (err?.response?.status === 404) {
|
||||
return false;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
const getItem = async key => {
|
||||
try {
|
||||
return await kvFetch(`/values/${r(key)}`).then(r2 => r2.text());
|
||||
} catch (err) {
|
||||
if (!err?.response) {
|
||||
throw err;
|
||||
}
|
||||
if (err?.response?.status === 404) {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
const setItem = async (key, value, topts) => {
|
||||
return await kvFetch(`/values/${r(key)}`, {
|
||||
method: "PUT",
|
||||
body: value,
|
||||
query: topts?.ttl ? {
|
||||
expiration_ttl: Math.max(topts?.ttl, opts.minTTL || 60)
|
||||
} : void 0
|
||||
});
|
||||
};
|
||||
const removeItem = async key => {
|
||||
return await kvFetch(`/values/${r(key)}`, {
|
||||
method: "DELETE"
|
||||
});
|
||||
};
|
||||
const getKeys = async base => {
|
||||
const keys = [];
|
||||
const params = {};
|
||||
if (base || opts.base) {
|
||||
params.prefix = r(base);
|
||||
}
|
||||
const firstPage = await kvFetch("/keys", {
|
||||
params
|
||||
});
|
||||
for (const item of firstPage.result) {
|
||||
keys.push(item.name);
|
||||
}
|
||||
const cursor = firstPage.result_info.cursor;
|
||||
if (cursor) {
|
||||
params.cursor = cursor;
|
||||
}
|
||||
while (params.cursor) {
|
||||
const pageResult = await kvFetch("/keys", {
|
||||
params
|
||||
});
|
||||
for (const item of pageResult.result) {
|
||||
keys.push(item.name);
|
||||
}
|
||||
const pageCursor = pageResult.result_info.cursor;
|
||||
params.cursor = pageCursor ? pageCursor : void 0;
|
||||
}
|
||||
return keys;
|
||||
};
|
||||
const clear = async () => {
|
||||
const keys = await getKeys();
|
||||
const chunks = keys.reduce((acc, key, i) => {
|
||||
if (i % 1e4 === 0) {
|
||||
acc.push([]);
|
||||
}
|
||||
acc[acc.length - 1].push(key);
|
||||
return acc;
|
||||
}, [[]]);
|
||||
await Promise.all(chunks.map(chunk => {
|
||||
if (chunk.length > 0) {
|
||||
return kvFetch("/bulk/delete", {
|
||||
method: "POST",
|
||||
body: chunk
|
||||
});
|
||||
}
|
||||
}));
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
hasItem,
|
||||
getItem,
|
||||
setItem,
|
||||
removeItem,
|
||||
getKeys: base => getKeys(base).then(keys => keys.map(key => opts.base ? key.slice(opts.base.length) : key)),
|
||||
clear
|
||||
};
|
||||
});
|
56
node_modules/unstorage/drivers/cloudflare-kv-http.d.ts
generated
vendored
Normal file
56
node_modules/unstorage/drivers/cloudflare-kv-http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
interface KVAuthAPIToken {
|
||||
/**
|
||||
* API Token generated from the [User Profile 'API Tokens' page](https://dash.cloudflare.com/profile/api-tokens)
|
||||
* of the Cloudflare console.
|
||||
* @see https://api.cloudflare.com/#getting-started-requests
|
||||
*/
|
||||
apiToken: string;
|
||||
}
|
||||
interface KVAuthServiceKey {
|
||||
/**
|
||||
* A special Cloudflare API key good for a restricted set of endpoints.
|
||||
* Always begins with "v1.0-", may vary in length.
|
||||
* May be used to authenticate in place of `apiToken` or `apiKey` and `email`.
|
||||
* @see https://api.cloudflare.com/#getting-started-requests
|
||||
*/
|
||||
userServiceKey: string;
|
||||
}
|
||||
interface KVAuthEmailKey {
|
||||
/**
|
||||
* Email address associated with your account.
|
||||
* Should be used along with `apiKey` to authenticate in place of `apiToken`.
|
||||
*/
|
||||
email: string;
|
||||
/**
|
||||
* API key generated on the "My Account" page of the Cloudflare console.
|
||||
* Should be used along with `email` to authenticate in place of `apiToken`.
|
||||
* @see https://api.cloudflare.com/#getting-started-requests
|
||||
*/
|
||||
apiKey: string;
|
||||
}
|
||||
export type KVHTTPOptions = {
|
||||
/**
|
||||
* Cloudflare account ID (required)
|
||||
*/
|
||||
accountId: string;
|
||||
/**
|
||||
* The ID of the KV namespace to target (required)
|
||||
*/
|
||||
namespaceId: string;
|
||||
/**
|
||||
* The URL of the Cloudflare API.
|
||||
* @default https://api.cloudflare.com
|
||||
*/
|
||||
apiURL?: string;
|
||||
/**
|
||||
* Adds prefix to all stored keys
|
||||
*/
|
||||
base?: string;
|
||||
/**
|
||||
* The minimum time-to-live (ttl) for setItem in seconds.
|
||||
* The default is 60 seconds as per Cloudflare's [documentation](https://developers.cloudflare.com/kv/api/write-key-value-pairs/).
|
||||
*/
|
||||
minTTL?: number;
|
||||
} & (KVAuthServiceKey | KVAuthAPIToken | KVAuthEmailKey);
|
||||
declare const _default: (opts: KVHTTPOptions) => import("..").Driver<KVHTTPOptions, never>;
|
||||
export default _default;
|
129
node_modules/unstorage/drivers/cloudflare-kv-http.mjs
generated
vendored
Normal file
129
node_modules/unstorage/drivers/cloudflare-kv-http.mjs
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
import { $fetch } from "ofetch";
|
||||
import {
|
||||
createError,
|
||||
createRequiredError,
|
||||
defineDriver,
|
||||
joinKeys
|
||||
} from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "cloudflare-kv-http";
|
||||
export default defineDriver((opts) => {
|
||||
if (!opts.accountId) {
|
||||
throw createRequiredError(DRIVER_NAME, "accountId");
|
||||
}
|
||||
if (!opts.namespaceId) {
|
||||
throw createRequiredError(DRIVER_NAME, "namespaceId");
|
||||
}
|
||||
let headers;
|
||||
if ("apiToken" in opts) {
|
||||
headers = { Authorization: `Bearer ${opts.apiToken}` };
|
||||
} else if ("userServiceKey" in opts) {
|
||||
headers = { "X-Auth-User-Service-Key": opts.userServiceKey };
|
||||
} else if (opts.email && opts.apiKey) {
|
||||
headers = { "X-Auth-Email": opts.email, "X-Auth-Key": opts.apiKey };
|
||||
} else {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
"One of the `apiToken`, `userServiceKey`, or a combination of `email` and `apiKey` is required."
|
||||
);
|
||||
}
|
||||
const apiURL = opts.apiURL || "https://api.cloudflare.com";
|
||||
const baseURL = `${apiURL}/client/v4/accounts/${opts.accountId}/storage/kv/namespaces/${opts.namespaceId}`;
|
||||
const kvFetch = $fetch.create({ baseURL, headers });
|
||||
const r = (key = "") => opts.base ? joinKeys(opts.base, key) : key;
|
||||
const hasItem = async (key) => {
|
||||
try {
|
||||
const res = await kvFetch(`/metadata/${r(key)}`);
|
||||
return res?.success === true;
|
||||
} catch (err) {
|
||||
if (!err?.response) {
|
||||
throw err;
|
||||
}
|
||||
if (err?.response?.status === 404) {
|
||||
return false;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
const getItem = async (key) => {
|
||||
try {
|
||||
return await kvFetch(`/values/${r(key)}`).then((r2) => r2.text());
|
||||
} catch (err) {
|
||||
if (!err?.response) {
|
||||
throw err;
|
||||
}
|
||||
if (err?.response?.status === 404) {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
const setItem = async (key, value, topts) => {
|
||||
return await kvFetch(`/values/${r(key)}`, {
|
||||
method: "PUT",
|
||||
body: value,
|
||||
query: topts?.ttl ? { expiration_ttl: Math.max(topts?.ttl, opts.minTTL || 60) } : void 0
|
||||
});
|
||||
};
|
||||
const removeItem = async (key) => {
|
||||
return await kvFetch(`/values/${r(key)}`, { method: "DELETE" });
|
||||
};
|
||||
const getKeys = async (base) => {
|
||||
const keys = [];
|
||||
const params = {};
|
||||
if (base || opts.base) {
|
||||
params.prefix = r(base);
|
||||
}
|
||||
const firstPage = await kvFetch("/keys", { params });
|
||||
for (const item of firstPage.result) {
|
||||
keys.push(item.name);
|
||||
}
|
||||
const cursor = firstPage.result_info.cursor;
|
||||
if (cursor) {
|
||||
params.cursor = cursor;
|
||||
}
|
||||
while (params.cursor) {
|
||||
const pageResult = await kvFetch("/keys", { params });
|
||||
for (const item of pageResult.result) {
|
||||
keys.push(item.name);
|
||||
}
|
||||
const pageCursor = pageResult.result_info.cursor;
|
||||
params.cursor = pageCursor ? pageCursor : void 0;
|
||||
}
|
||||
return keys;
|
||||
};
|
||||
const clear = async () => {
|
||||
const keys = await getKeys();
|
||||
const chunks = keys.reduce(
|
||||
(acc, key, i) => {
|
||||
if (i % 1e4 === 0) {
|
||||
acc.push([]);
|
||||
}
|
||||
acc[acc.length - 1].push(key);
|
||||
return acc;
|
||||
},
|
||||
[[]]
|
||||
);
|
||||
await Promise.all(
|
||||
chunks.map((chunk) => {
|
||||
if (chunk.length > 0) {
|
||||
return kvFetch("/bulk/delete", {
|
||||
method: "POST",
|
||||
body: chunk
|
||||
});
|
||||
}
|
||||
})
|
||||
);
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
hasItem,
|
||||
getItem,
|
||||
setItem,
|
||||
removeItem,
|
||||
getKeys: (base) => getKeys(base).then(
|
||||
(keys) => keys.map((key) => opts.base ? key.slice(opts.base.length) : key)
|
||||
),
|
||||
clear
|
||||
};
|
||||
});
|
103
node_modules/unstorage/drivers/cloudflare-r2-binding.cjs
generated
vendored
Normal file
103
node_modules/unstorage/drivers/cloudflare-r2-binding.cjs
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _cloudflare = require("./utils/cloudflare.cjs");
|
||||
const DRIVER_NAME = "cloudflare-r2-binding";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const r = (key = "") => opts.base ? (0, _utils.joinKeys)(opts.base, key) : key;
|
||||
const getKeys = async base => {
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
const kvList = await binding.list(base || opts.base ? {
|
||||
prefix: r(base)
|
||||
} : void 0);
|
||||
return kvList.objects.map(obj => obj.key);
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => (0, _cloudflare.getR2Binding)(opts.binding),
|
||||
async hasItem(key) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
return (await binding.head(key)) !== null;
|
||||
},
|
||||
async getMeta(key) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
const obj = await binding.head(key);
|
||||
if (!obj) return null;
|
||||
return {
|
||||
mtime: obj.uploaded,
|
||||
atime: obj.uploaded,
|
||||
...obj
|
||||
};
|
||||
},
|
||||
getItem(key, topts) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
return binding.get(key, topts).then(r2 => r2?.text() ?? null);
|
||||
},
|
||||
async getItemRaw(key, topts) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
const object = await binding.get(key, topts);
|
||||
return object ? getObjBody(object, topts?.type) : null;
|
||||
},
|
||||
async setItem(key, value, topts) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
await binding.put(key, value, topts);
|
||||
},
|
||||
async setItemRaw(key, value, topts) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
await binding.put(key, value, topts);
|
||||
},
|
||||
async removeItem(key) {
|
||||
key = r(key);
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
await binding.delete(key);
|
||||
},
|
||||
getKeys(base) {
|
||||
return getKeys(base).then(keys => opts.base ? keys.map(key => key.slice(opts.base.length)) : keys);
|
||||
},
|
||||
async clear(base) {
|
||||
const binding = (0, _cloudflare.getR2Binding)(opts.binding);
|
||||
const keys = await getKeys(base);
|
||||
await binding.delete(keys);
|
||||
}
|
||||
};
|
||||
});
|
||||
function getObjBody(object, type) {
|
||||
switch (type) {
|
||||
case "object":
|
||||
{
|
||||
return object;
|
||||
}
|
||||
case "stream":
|
||||
{
|
||||
return object.body;
|
||||
}
|
||||
case "blob":
|
||||
{
|
||||
return object.blob();
|
||||
}
|
||||
case "arrayBuffer":
|
||||
{
|
||||
return object.arrayBuffer();
|
||||
}
|
||||
case "bytes":
|
||||
{
|
||||
return object.arrayBuffer().then(buffer => new Uint8Array(buffer));
|
||||
}
|
||||
// TODO: Default to bytes in v2
|
||||
default:
|
||||
{
|
||||
return object.arrayBuffer();
|
||||
}
|
||||
}
|
||||
}
|
6
node_modules/unstorage/drivers/cloudflare-r2-binding.d.ts
generated
vendored
Normal file
6
node_modules/unstorage/drivers/cloudflare-r2-binding.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
export interface CloudflareR2Options {
|
||||
binding?: string | R2Bucket;
|
||||
base?: string;
|
||||
}
|
||||
declare const _default: (opts: CloudflareR2Options | undefined) => import("..").Driver<CloudflareR2Options | undefined, R2Bucket>;
|
||||
export default _default;
|
93
node_modules/unstorage/drivers/cloudflare-r2-binding.mjs
generated
vendored
Normal file
93
node_modules/unstorage/drivers/cloudflare-r2-binding.mjs
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
import { defineDriver, joinKeys } from "./utils/index.mjs";
|
||||
import { getR2Binding } from "./utils/cloudflare.mjs";
|
||||
const DRIVER_NAME = "cloudflare-r2-binding";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const r = (key = "") => opts.base ? joinKeys(opts.base, key) : key;
|
||||
const getKeys = async (base) => {
|
||||
const binding = getR2Binding(opts.binding);
|
||||
const kvList = await binding.list(
|
||||
base || opts.base ? { prefix: r(base) } : void 0
|
||||
);
|
||||
return kvList.objects.map((obj) => obj.key);
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => getR2Binding(opts.binding),
|
||||
async hasItem(key) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
return await binding.head(key) !== null;
|
||||
},
|
||||
async getMeta(key) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
const obj = await binding.head(key);
|
||||
if (!obj) return null;
|
||||
return {
|
||||
mtime: obj.uploaded,
|
||||
atime: obj.uploaded,
|
||||
...obj
|
||||
};
|
||||
},
|
||||
getItem(key, topts) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
return binding.get(key, topts).then((r2) => r2?.text() ?? null);
|
||||
},
|
||||
async getItemRaw(key, topts) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
const object = await binding.get(key, topts);
|
||||
return object ? getObjBody(object, topts?.type) : null;
|
||||
},
|
||||
async setItem(key, value, topts) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
await binding.put(key, value, topts);
|
||||
},
|
||||
async setItemRaw(key, value, topts) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
await binding.put(key, value, topts);
|
||||
},
|
||||
async removeItem(key) {
|
||||
key = r(key);
|
||||
const binding = getR2Binding(opts.binding);
|
||||
await binding.delete(key);
|
||||
},
|
||||
getKeys(base) {
|
||||
return getKeys(base).then(
|
||||
(keys) => opts.base ? keys.map((key) => key.slice(opts.base.length)) : keys
|
||||
);
|
||||
},
|
||||
async clear(base) {
|
||||
const binding = getR2Binding(opts.binding);
|
||||
const keys = await getKeys(base);
|
||||
await binding.delete(keys);
|
||||
}
|
||||
};
|
||||
});
|
||||
function getObjBody(object, type) {
|
||||
switch (type) {
|
||||
case "object": {
|
||||
return object;
|
||||
}
|
||||
case "stream": {
|
||||
return object.body;
|
||||
}
|
||||
case "blob": {
|
||||
return object.blob();
|
||||
}
|
||||
case "arrayBuffer": {
|
||||
return object.arrayBuffer();
|
||||
}
|
||||
case "bytes": {
|
||||
return object.arrayBuffer().then((buffer) => new Uint8Array(buffer));
|
||||
}
|
||||
// TODO: Default to bytes in v2
|
||||
default: {
|
||||
return object.arrayBuffer();
|
||||
}
|
||||
}
|
||||
}
|
155
node_modules/unstorage/drivers/db0.cjs
generated
vendored
Normal file
155
node_modules/unstorage/drivers/db0.cjs
generated
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "db0";
|
||||
const DEFAULT_TABLE_NAME = "unstorage";
|
||||
const kExperimentalWarning = "__unstorage_db0_experimental_warning__";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
opts.tableName = opts.tableName || DEFAULT_TABLE_NAME;
|
||||
let setupPromise;
|
||||
let setupDone = false;
|
||||
const ensureTable = () => {
|
||||
if (setupDone) {
|
||||
return;
|
||||
}
|
||||
if (!setupPromise) {
|
||||
if (!globalThis[kExperimentalWarning]) {
|
||||
console.warn("[unstorage]: Database driver is experimental and behavior may change in the future.");
|
||||
globalThis[kExperimentalWarning] = true;
|
||||
}
|
||||
setupPromise = setupTable(opts).then(() => {
|
||||
setupDone = true;
|
||||
setupPromise = void 0;
|
||||
});
|
||||
}
|
||||
return setupPromise;
|
||||
};
|
||||
const isMysql = opts.database.dialect === "mysql";
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => opts.database,
|
||||
async hasItem(key) {
|
||||
await ensureTable();
|
||||
const {
|
||||
rows
|
||||
} = isMysql ? await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE \`key\` = ${key}) AS \`value\`` : await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE key = ${key}) AS value`;
|
||||
return rows?.[0]?.value == "1";
|
||||
},
|
||||
getItem: async key => {
|
||||
await ensureTable();
|
||||
const {
|
||||
rows
|
||||
} = isMysql ? await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE key = ${key}`;
|
||||
return rows?.[0]?.value ?? null;
|
||||
},
|
||||
getItemRaw: async key => {
|
||||
await ensureTable();
|
||||
const {
|
||||
rows
|
||||
} = isMysql ? await opts.database.sql`SELECT \`blob\` as value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT blob as value FROM {${opts.tableName}} WHERE key = ${key}`;
|
||||
return rows?.[0]?.value ?? null;
|
||||
},
|
||||
setItem: async (key, value) => {
|
||||
await ensureTable();
|
||||
if (isMysql) {
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`value\`, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE value = ${value}, updated_at = CURRENT_TIMESTAMP`;
|
||||
} else {
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (key, value, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET value = ${value}, updated_at = CURRENT_TIMESTAMP`;
|
||||
}
|
||||
},
|
||||
async setItemRaw(key, value) {
|
||||
await ensureTable();
|
||||
if (isMysql) {
|
||||
const blob = Buffer.from(value);
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`blob\`, created_at, updated_at) VALUES (${key}, ${blob}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE \`blob\` = ${blob}, updated_at = CURRENT_TIMESTAMP`;
|
||||
} else {
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (key, blob, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET blob = ${value}, updated_at = CURRENT_TIMESTAMP`;
|
||||
}
|
||||
},
|
||||
removeItem: async key => {
|
||||
await ensureTable();
|
||||
if (isMysql) {
|
||||
await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE \`key\`=${key}`;
|
||||
} else {
|
||||
await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE key=${key}`;
|
||||
}
|
||||
},
|
||||
getMeta: async key => {
|
||||
await ensureTable();
|
||||
const {
|
||||
rows
|
||||
} = isMysql ? await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE key = ${key}`;
|
||||
return {
|
||||
birthtime: toDate(rows?.[0]?.created_at),
|
||||
mtime: toDate(rows?.[0]?.updated_at)
|
||||
};
|
||||
},
|
||||
getKeys: async (base = "") => {
|
||||
await ensureTable();
|
||||
const {
|
||||
rows
|
||||
} = isMysql ? await opts.database.sql`SELECT \`key\` FROM {${opts.tableName}} WHERE \`key\` LIKE ${base + "%"}` : await opts.database.sql`SELECT key FROM {${opts.tableName}} WHERE key LIKE ${base + "%"}`;
|
||||
return rows?.map(r => r.key);
|
||||
},
|
||||
clear: async () => {
|
||||
await ensureTable();
|
||||
await opts.database.sql`DELETE FROM {${opts.tableName}}`;
|
||||
}
|
||||
};
|
||||
});
|
||||
async function setupTable(opts) {
|
||||
switch (opts.database.dialect) {
|
||||
case "sqlite":
|
||||
case "libsql":
|
||||
{
|
||||
await opts.database.sql`
|
||||
CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT,
|
||||
blob BLOB,
|
||||
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
return;
|
||||
}
|
||||
case "postgresql":
|
||||
{
|
||||
await opts.database.sql`
|
||||
CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
|
||||
key VARCHAR(255) NOT NULL PRIMARY KEY,
|
||||
value TEXT,
|
||||
blob BYTEA,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
return;
|
||||
}
|
||||
case "mysql":
|
||||
{
|
||||
await opts.database.sql`
|
||||
CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
|
||||
\`key\` VARCHAR(255) NOT NULL PRIMARY KEY,
|
||||
\`value\` LONGTEXT,
|
||||
\`blob\` BLOB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
return;
|
||||
}
|
||||
default:
|
||||
{
|
||||
throw (0, _utils.createError)(DRIVER_NAME, `unsuppoted SQL dialect: ${opts.database.dialect}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
function toDate(timestamp) {
|
||||
return timestamp ? new Date(timestamp) : void 0;
|
||||
}
|
7
node_modules/unstorage/drivers/db0.d.ts
generated
vendored
Normal file
7
node_modules/unstorage/drivers/db0.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import type { Database } from "db0";
|
||||
export interface DB0DriverOptions {
|
||||
database: Database;
|
||||
tableName?: string;
|
||||
}
|
||||
declare const _default: (opts: DB0DriverOptions) => import("..").Driver<DB0DriverOptions, Database<import("db0").Connector<unknown>>>;
|
||||
export default _default;
|
140
node_modules/unstorage/drivers/db0.mjs
generated
vendored
Normal file
140
node_modules/unstorage/drivers/db0.mjs
generated
vendored
Normal file
@@ -0,0 +1,140 @@
|
||||
import { createError, defineDriver } from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "db0";
|
||||
const DEFAULT_TABLE_NAME = "unstorage";
|
||||
const kExperimentalWarning = "__unstorage_db0_experimental_warning__";
|
||||
export default defineDriver((opts) => {
|
||||
opts.tableName = opts.tableName || DEFAULT_TABLE_NAME;
|
||||
let setupPromise;
|
||||
let setupDone = false;
|
||||
const ensureTable = () => {
|
||||
if (setupDone) {
|
||||
return;
|
||||
}
|
||||
if (!setupPromise) {
|
||||
if (!globalThis[kExperimentalWarning]) {
|
||||
console.warn(
|
||||
"[unstorage]: Database driver is experimental and behavior may change in the future."
|
||||
);
|
||||
globalThis[kExperimentalWarning] = true;
|
||||
}
|
||||
setupPromise = setupTable(opts).then(() => {
|
||||
setupDone = true;
|
||||
setupPromise = void 0;
|
||||
});
|
||||
}
|
||||
return setupPromise;
|
||||
};
|
||||
const isMysql = opts.database.dialect === "mysql";
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => opts.database,
|
||||
async hasItem(key) {
|
||||
await ensureTable();
|
||||
const { rows } = isMysql ? await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE \`key\` = ${key}) AS \`value\`` : await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE key = ${key}) AS value`;
|
||||
return rows?.[0]?.value == "1";
|
||||
},
|
||||
getItem: async (key) => {
|
||||
await ensureTable();
|
||||
const { rows } = isMysql ? await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE key = ${key}`;
|
||||
return rows?.[0]?.value ?? null;
|
||||
},
|
||||
getItemRaw: async (key) => {
|
||||
await ensureTable();
|
||||
const { rows } = isMysql ? await opts.database.sql`SELECT \`blob\` as value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT blob as value FROM {${opts.tableName}} WHERE key = ${key}`;
|
||||
return rows?.[0]?.value ?? null;
|
||||
},
|
||||
setItem: async (key, value) => {
|
||||
await ensureTable();
|
||||
if (isMysql) {
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`value\`, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE value = ${value}, updated_at = CURRENT_TIMESTAMP`;
|
||||
} else {
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (key, value, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET value = ${value}, updated_at = CURRENT_TIMESTAMP`;
|
||||
}
|
||||
},
|
||||
async setItemRaw(key, value) {
|
||||
await ensureTable();
|
||||
if (isMysql) {
|
||||
const blob = Buffer.from(value);
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`blob\`, created_at, updated_at) VALUES (${key}, ${blob}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE \`blob\` = ${blob}, updated_at = CURRENT_TIMESTAMP`;
|
||||
} else {
|
||||
await opts.database.sql`INSERT INTO {${opts.tableName}} (key, blob, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET blob = ${value}, updated_at = CURRENT_TIMESTAMP`;
|
||||
}
|
||||
},
|
||||
removeItem: async (key) => {
|
||||
await ensureTable();
|
||||
if (isMysql) {
|
||||
await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE \`key\`=${key}`;
|
||||
} else {
|
||||
await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE key=${key}`;
|
||||
}
|
||||
},
|
||||
getMeta: async (key) => {
|
||||
await ensureTable();
|
||||
const { rows } = isMysql ? await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE key = ${key}`;
|
||||
return {
|
||||
birthtime: toDate(rows?.[0]?.created_at),
|
||||
mtime: toDate(rows?.[0]?.updated_at)
|
||||
};
|
||||
},
|
||||
getKeys: async (base = "") => {
|
||||
await ensureTable();
|
||||
const { rows } = isMysql ? await opts.database.sql`SELECT \`key\` FROM {${opts.tableName}} WHERE \`key\` LIKE ${base + "%"}` : await opts.database.sql`SELECT key FROM {${opts.tableName}} WHERE key LIKE ${base + "%"}`;
|
||||
return rows?.map((r) => r.key);
|
||||
},
|
||||
clear: async () => {
|
||||
await ensureTable();
|
||||
await opts.database.sql`DELETE FROM {${opts.tableName}}`;
|
||||
}
|
||||
};
|
||||
});
|
||||
async function setupTable(opts) {
|
||||
switch (opts.database.dialect) {
|
||||
case "sqlite":
|
||||
case "libsql": {
|
||||
await opts.database.sql`
|
||||
CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT,
|
||||
blob BLOB,
|
||||
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
return;
|
||||
}
|
||||
case "postgresql": {
|
||||
await opts.database.sql`
|
||||
CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
|
||||
key VARCHAR(255) NOT NULL PRIMARY KEY,
|
||||
value TEXT,
|
||||
blob BYTEA,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
return;
|
||||
}
|
||||
case "mysql": {
|
||||
await opts.database.sql`
|
||||
CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
|
||||
\`key\` VARCHAR(255) NOT NULL PRIMARY KEY,
|
||||
\`value\` LONGTEXT,
|
||||
\`blob\` BLOB,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
|
||||
);
|
||||
`;
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
`unsuppoted SQL dialect: ${opts.database.dialect}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
function toDate(timestamp) {
|
||||
return timestamp ? new Date(timestamp) : void 0;
|
||||
}
|
24
node_modules/unstorage/drivers/deno-kv-node.cjs
generated
vendored
Normal file
24
node_modules/unstorage/drivers/deno-kv-node.cjs
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _kv = require("@deno/kv");
|
||||
var _index = require("./utils/index.cjs");
|
||||
var _denoKv = _interopRequireDefault(require("./deno-kv.cjs"));
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
const DRIVER_NAME = "deno-kv-node";
|
||||
module.exports = (0, _index.defineDriver)((opts = {}) => {
|
||||
const baseDriver = (0, _denoKv.default)({
|
||||
...opts,
|
||||
openKv: () => (0, _kv.openKv)(opts.path, opts.openKvOptions)
|
||||
});
|
||||
return {
|
||||
...baseDriver,
|
||||
getInstance() {
|
||||
return baseDriver.getInstance();
|
||||
},
|
||||
name: DRIVER_NAME
|
||||
};
|
||||
});
|
8
node_modules/unstorage/drivers/deno-kv-node.d.ts
generated
vendored
Normal file
8
node_modules/unstorage/drivers/deno-kv-node.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import { openKv, type Kv } from "@deno/kv";
|
||||
export interface DenoKvNodeOptions {
|
||||
base?: string;
|
||||
path?: string;
|
||||
openKvOptions?: Parameters<typeof openKv>[1];
|
||||
}
|
||||
declare const _default: (opts: DenoKvNodeOptions) => import("..").Driver<DenoKvNodeOptions, Kv | Promise<Kv>>;
|
||||
export default _default;
|
19
node_modules/unstorage/drivers/deno-kv-node.mjs
generated
vendored
Normal file
19
node_modules/unstorage/drivers/deno-kv-node.mjs
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { openKv } from "@deno/kv";
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import denoKV from "./deno-kv.mjs";
|
||||
const DRIVER_NAME = "deno-kv-node";
|
||||
export default defineDriver(
|
||||
(opts = {}) => {
|
||||
const baseDriver = denoKV({
|
||||
...opts,
|
||||
openKv: () => openKv(opts.path, opts.openKvOptions)
|
||||
});
|
||||
return {
|
||||
...baseDriver,
|
||||
getInstance() {
|
||||
return baseDriver.getInstance();
|
||||
},
|
||||
name: DRIVER_NAME
|
||||
};
|
||||
}
|
||||
);
|
90
node_modules/unstorage/drivers/deno-kv.cjs
generated
vendored
Normal file
90
node_modules/unstorage/drivers/deno-kv.cjs
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _index = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "deno-kv";
|
||||
module.exports = (0, _index.defineDriver)((opts = {}) => {
|
||||
const basePrefix = opts.base ? (0, _index.normalizeKey)(opts.base).split(":") : [];
|
||||
const r = (key = "") => [...basePrefix, ...key.split(":")].filter(Boolean);
|
||||
let _kv;
|
||||
const getKv = () => {
|
||||
if (_kv) {
|
||||
return _kv;
|
||||
}
|
||||
if (opts.openKv) {
|
||||
_kv = opts.openKv();
|
||||
} else {
|
||||
if (!globalThis.Deno) {
|
||||
throw (0, _index.createError)(DRIVER_NAME, "Missing global `Deno`. Are you running in Deno? (hint: use `deno-kv-node` driver for Node.js)");
|
||||
}
|
||||
if (!Deno.openKv) {
|
||||
throw (0, _index.createError)(DRIVER_NAME, "Missing `Deno.openKv`. Are you running Deno with --unstable-kv?");
|
||||
}
|
||||
_kv = Deno.openKv(opts.path);
|
||||
}
|
||||
return _kv;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
getInstance() {
|
||||
return getKv();
|
||||
},
|
||||
async hasItem(key) {
|
||||
const kv = await getKv();
|
||||
const value = await kv.get(r(key));
|
||||
return !!value.value;
|
||||
},
|
||||
async getItem(key) {
|
||||
const kv = await getKv();
|
||||
const value = await kv.get(r(key));
|
||||
return value.value;
|
||||
},
|
||||
async getItemRaw(key) {
|
||||
const kv = await getKv();
|
||||
const value = await kv.get(r(key));
|
||||
return value.value;
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const kv = await getKv();
|
||||
await kv.set(r(key), value);
|
||||
},
|
||||
async setItemRaw(key, value) {
|
||||
const kv = await getKv();
|
||||
await kv.set(r(key), value);
|
||||
},
|
||||
async removeItem(key) {
|
||||
const kv = await getKv();
|
||||
await kv.delete(r(key));
|
||||
},
|
||||
async getKeys(base) {
|
||||
const kv = await getKv();
|
||||
const keys = [];
|
||||
for await (const entry of kv.list({
|
||||
prefix: r(base)
|
||||
})) {
|
||||
keys.push((basePrefix.length > 0 ? entry.key.slice(basePrefix.length) : entry.key).join(":"));
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async clear(base) {
|
||||
const kv = await getKv();
|
||||
const batch = kv.atomic();
|
||||
for await (const entry of kv.list({
|
||||
prefix: r(base)
|
||||
})) {
|
||||
batch.delete(entry.key);
|
||||
}
|
||||
await batch.commit();
|
||||
},
|
||||
async dispose() {
|
||||
if (_kv) {
|
||||
const kv = await _kv;
|
||||
await kv.close();
|
||||
_kv = void 0;
|
||||
}
|
||||
}
|
||||
};
|
||||
});
|
8
node_modules/unstorage/drivers/deno-kv.d.ts
generated
vendored
Normal file
8
node_modules/unstorage/drivers/deno-kv.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { Kv } from "@deno/kv";
|
||||
export interface DenoKvOptions {
|
||||
base?: string;
|
||||
path?: string;
|
||||
openKv?: () => Promise<Deno.Kv | Kv>;
|
||||
}
|
||||
declare const _default: (opts: DenoKvOptions) => import("..").Driver<DenoKvOptions, Promise<Deno.Kv | Kv>>;
|
||||
export default _default;
|
90
node_modules/unstorage/drivers/deno-kv.mjs
generated
vendored
Normal file
90
node_modules/unstorage/drivers/deno-kv.mjs
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
import { defineDriver, createError, normalizeKey } from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "deno-kv";
|
||||
export default defineDriver(
|
||||
(opts = {}) => {
|
||||
const basePrefix = opts.base ? normalizeKey(opts.base).split(":") : [];
|
||||
const r = (key = "") => [...basePrefix, ...key.split(":")].filter(Boolean);
|
||||
let _kv;
|
||||
const getKv = () => {
|
||||
if (_kv) {
|
||||
return _kv;
|
||||
}
|
||||
if (opts.openKv) {
|
||||
_kv = opts.openKv();
|
||||
} else {
|
||||
if (!globalThis.Deno) {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
"Missing global `Deno`. Are you running in Deno? (hint: use `deno-kv-node` driver for Node.js)"
|
||||
);
|
||||
}
|
||||
if (!Deno.openKv) {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
"Missing `Deno.openKv`. Are you running Deno with --unstable-kv?"
|
||||
);
|
||||
}
|
||||
_kv = Deno.openKv(opts.path);
|
||||
}
|
||||
return _kv;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
getInstance() {
|
||||
return getKv();
|
||||
},
|
||||
async hasItem(key) {
|
||||
const kv = await getKv();
|
||||
const value = await kv.get(r(key));
|
||||
return !!value.value;
|
||||
},
|
||||
async getItem(key) {
|
||||
const kv = await getKv();
|
||||
const value = await kv.get(r(key));
|
||||
return value.value;
|
||||
},
|
||||
async getItemRaw(key) {
|
||||
const kv = await getKv();
|
||||
const value = await kv.get(r(key));
|
||||
return value.value;
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const kv = await getKv();
|
||||
await kv.set(r(key), value);
|
||||
},
|
||||
async setItemRaw(key, value) {
|
||||
const kv = await getKv();
|
||||
await kv.set(r(key), value);
|
||||
},
|
||||
async removeItem(key) {
|
||||
const kv = await getKv();
|
||||
await kv.delete(r(key));
|
||||
},
|
||||
async getKeys(base) {
|
||||
const kv = await getKv();
|
||||
const keys = [];
|
||||
for await (const entry of kv.list({ prefix: r(base) })) {
|
||||
keys.push(
|
||||
(basePrefix.length > 0 ? entry.key.slice(basePrefix.length) : entry.key).join(":")
|
||||
);
|
||||
}
|
||||
return keys;
|
||||
},
|
||||
async clear(base) {
|
||||
const kv = await getKv();
|
||||
const batch = kv.atomic();
|
||||
for await (const entry of kv.list({ prefix: r(base) })) {
|
||||
batch.delete(entry.key);
|
||||
}
|
||||
await batch.commit();
|
||||
},
|
||||
async dispose() {
|
||||
if (_kv) {
|
||||
const kv = await _kv;
|
||||
await kv.close();
|
||||
_kv = void 0;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
);
|
84
node_modules/unstorage/drivers/fs-lite.cjs
generated
vendored
Normal file
84
node_modules/unstorage/drivers/fs-lite.cjs
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _nodeFs = require("node:fs");
|
||||
var _nodePath = require("node:path");
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _nodeFs2 = require("./utils/node-fs.cjs");
|
||||
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
|
||||
const DRIVER_NAME = "fs-lite";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
if (!opts.base) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "base");
|
||||
}
|
||||
opts.base = (0, _nodePath.resolve)(opts.base);
|
||||
const r = key => {
|
||||
if (PATH_TRAVERSE_RE.test(key)) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, `Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`);
|
||||
}
|
||||
const resolved = (0, _nodePath.join)(opts.base, key.replace(/:/g, "/"));
|
||||
return resolved;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
flags: {
|
||||
maxDepth: true
|
||||
},
|
||||
hasItem(key) {
|
||||
return (0, _nodeFs.existsSync)(r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return (0, _nodeFs2.readFile)(r(key), "utf8");
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return (0, _nodeFs2.readFile)(r(key));
|
||||
},
|
||||
async getMeta(key) {
|
||||
const {
|
||||
atime,
|
||||
mtime,
|
||||
size,
|
||||
birthtime,
|
||||
ctime
|
||||
} = await _nodeFs.promises.stat(r(key)).catch(() => ({}));
|
||||
return {
|
||||
atime,
|
||||
mtime,
|
||||
size,
|
||||
birthtime,
|
||||
ctime
|
||||
};
|
||||
},
|
||||
setItem(key, value) {
|
||||
if (opts.readOnly) {
|
||||
return;
|
||||
}
|
||||
return (0, _nodeFs2.writeFile)(r(key), value, "utf8");
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
if (opts.readOnly) {
|
||||
return;
|
||||
}
|
||||
return (0, _nodeFs2.writeFile)(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
if (opts.readOnly) {
|
||||
return;
|
||||
}
|
||||
return (0, _nodeFs2.unlink)(r(key));
|
||||
},
|
||||
getKeys(_base, topts) {
|
||||
return (0, _nodeFs2.readdirRecursive)(r("."), opts.ignore, topts?.maxDepth);
|
||||
},
|
||||
async clear() {
|
||||
if (opts.readOnly || opts.noClear) {
|
||||
return;
|
||||
}
|
||||
await (0, _nodeFs2.rmRecursive)(r("."));
|
||||
}
|
||||
};
|
||||
});
|
8
node_modules/unstorage/drivers/fs-lite.d.ts
generated
vendored
Normal file
8
node_modules/unstorage/drivers/fs-lite.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
export interface FSStorageOptions {
|
||||
base?: string;
|
||||
ignore?: (path: string) => boolean;
|
||||
readOnly?: boolean;
|
||||
noClear?: boolean;
|
||||
}
|
||||
declare const _default: (opts: FSStorageOptions | undefined) => import("..").Driver<FSStorageOptions | undefined, never>;
|
||||
export default _default;
|
75
node_modules/unstorage/drivers/fs-lite.mjs
generated
vendored
Normal file
75
node_modules/unstorage/drivers/fs-lite.mjs
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
import { existsSync, promises as fsp } from "node:fs";
|
||||
import { resolve, join } from "node:path";
|
||||
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import {
|
||||
readFile,
|
||||
writeFile,
|
||||
readdirRecursive,
|
||||
rmRecursive,
|
||||
unlink
|
||||
} from "./utils/node-fs.mjs";
|
||||
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
|
||||
const DRIVER_NAME = "fs-lite";
|
||||
export default defineDriver((opts = {}) => {
|
||||
if (!opts.base) {
|
||||
throw createRequiredError(DRIVER_NAME, "base");
|
||||
}
|
||||
opts.base = resolve(opts.base);
|
||||
const r = (key) => {
|
||||
if (PATH_TRAVERSE_RE.test(key)) {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
`Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`
|
||||
);
|
||||
}
|
||||
const resolved = join(opts.base, key.replace(/:/g, "/"));
|
||||
return resolved;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
flags: {
|
||||
maxDepth: true
|
||||
},
|
||||
hasItem(key) {
|
||||
return existsSync(r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return readFile(r(key), "utf8");
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return readFile(r(key));
|
||||
},
|
||||
async getMeta(key) {
|
||||
const { atime, mtime, size, birthtime, ctime } = await fsp.stat(r(key)).catch(() => ({}));
|
||||
return { atime, mtime, size, birthtime, ctime };
|
||||
},
|
||||
setItem(key, value) {
|
||||
if (opts.readOnly) {
|
||||
return;
|
||||
}
|
||||
return writeFile(r(key), value, "utf8");
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
if (opts.readOnly) {
|
||||
return;
|
||||
}
|
||||
return writeFile(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
if (opts.readOnly) {
|
||||
return;
|
||||
}
|
||||
return unlink(r(key));
|
||||
},
|
||||
getKeys(_base, topts) {
|
||||
return readdirRecursive(r("."), opts.ignore, topts?.maxDepth);
|
||||
},
|
||||
async clear() {
|
||||
if (opts.readOnly || opts.noClear) {
|
||||
return;
|
||||
}
|
||||
await rmRecursive(r("."));
|
||||
}
|
||||
};
|
||||
});
|
130
node_modules/unstorage/drivers/fs.cjs
generated
vendored
Normal file
130
node_modules/unstorage/drivers/fs.cjs
generated
vendored
Normal file
@@ -0,0 +1,130 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _nodeFs = require("node:fs");
|
||||
var _nodePath = require("node:path");
|
||||
var _chokidar = require("chokidar");
|
||||
var _anymatch = _interopRequireDefault(require("anymatch"));
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _nodeFs2 = require("./utils/node-fs.cjs");
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
|
||||
const DRIVER_NAME = "fs";
|
||||
module.exports = (0, _utils.defineDriver)((userOptions = {}) => {
|
||||
if (!userOptions.base) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "base");
|
||||
}
|
||||
const base = (0, _nodePath.resolve)(userOptions.base);
|
||||
const ignore = (0, _anymatch.default)(userOptions.ignore || ["**/node_modules/**", "**/.git/**"]);
|
||||
const r = key => {
|
||||
if (PATH_TRAVERSE_RE.test(key)) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, `Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`);
|
||||
}
|
||||
const resolved = (0, _nodePath.join)(base, key.replace(/:/g, "/"));
|
||||
return resolved;
|
||||
};
|
||||
let _watcher;
|
||||
const _unwatch = async () => {
|
||||
if (_watcher) {
|
||||
await _watcher.close();
|
||||
_watcher = void 0;
|
||||
}
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: userOptions,
|
||||
flags: {
|
||||
maxDepth: true
|
||||
},
|
||||
hasItem(key) {
|
||||
return (0, _nodeFs.existsSync)(r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return (0, _nodeFs2.readFile)(r(key), "utf8");
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return (0, _nodeFs2.readFile)(r(key));
|
||||
},
|
||||
async getMeta(key) {
|
||||
const {
|
||||
atime,
|
||||
mtime,
|
||||
size,
|
||||
birthtime,
|
||||
ctime
|
||||
} = await _nodeFs.promises.stat(r(key)).catch(() => ({}));
|
||||
return {
|
||||
atime,
|
||||
mtime,
|
||||
size,
|
||||
birthtime,
|
||||
ctime
|
||||
};
|
||||
},
|
||||
setItem(key, value) {
|
||||
if (userOptions.readOnly) {
|
||||
return;
|
||||
}
|
||||
return (0, _nodeFs2.writeFile)(r(key), value, "utf8");
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
if (userOptions.readOnly) {
|
||||
return;
|
||||
}
|
||||
return (0, _nodeFs2.writeFile)(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
if (userOptions.readOnly) {
|
||||
return;
|
||||
}
|
||||
return (0, _nodeFs2.unlink)(r(key));
|
||||
},
|
||||
getKeys(_base, topts) {
|
||||
return (0, _nodeFs2.readdirRecursive)(r("."), ignore, topts?.maxDepth);
|
||||
},
|
||||
async clear() {
|
||||
if (userOptions.readOnly || userOptions.noClear) {
|
||||
return;
|
||||
}
|
||||
await (0, _nodeFs2.rmRecursive)(r("."));
|
||||
},
|
||||
async dispose() {
|
||||
if (_watcher) {
|
||||
await _watcher.close();
|
||||
}
|
||||
},
|
||||
async watch(callback) {
|
||||
if (_watcher) {
|
||||
return _unwatch;
|
||||
}
|
||||
await new Promise((resolve2, reject) => {
|
||||
const watchOptions = {
|
||||
ignoreInitial: true,
|
||||
...userOptions.watchOptions
|
||||
};
|
||||
if (!watchOptions.ignored) {
|
||||
watchOptions.ignored = [];
|
||||
} else if (Array.isArray(watchOptions.ignored)) {
|
||||
watchOptions.ignored = [...watchOptions.ignored];
|
||||
} else {
|
||||
watchOptions.ignored = [watchOptions.ignored];
|
||||
}
|
||||
watchOptions.ignored.push(ignore);
|
||||
_watcher = (0, _chokidar.watch)(base, watchOptions).on("ready", () => {
|
||||
resolve2();
|
||||
}).on("error", reject).on("all", (eventName, path) => {
|
||||
path = (0, _nodePath.relative)(base, path);
|
||||
if (eventName === "change" || eventName === "add") {
|
||||
callback("update", path);
|
||||
} else if (eventName === "unlink") {
|
||||
callback("remove", path);
|
||||
}
|
||||
});
|
||||
});
|
||||
return _unwatch;
|
||||
}
|
||||
};
|
||||
});
|
10
node_modules/unstorage/drivers/fs.d.ts
generated
vendored
Normal file
10
node_modules/unstorage/drivers/fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import { type ChokidarOptions } from "chokidar";
|
||||
export interface FSStorageOptions {
|
||||
base?: string;
|
||||
ignore?: string[];
|
||||
readOnly?: boolean;
|
||||
noClear?: boolean;
|
||||
watchOptions?: ChokidarOptions;
|
||||
}
|
||||
declare const _default: (opts: FSStorageOptions | undefined) => import("..").Driver<FSStorageOptions | undefined, never>;
|
||||
export default _default;
|
122
node_modules/unstorage/drivers/fs.mjs
generated
vendored
Normal file
122
node_modules/unstorage/drivers/fs.mjs
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
import { existsSync, promises as fsp } from "node:fs";
|
||||
import { resolve, relative, join } from "node:path";
|
||||
import { watch } from "chokidar";
|
||||
import anymatch from "anymatch";
|
||||
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import {
|
||||
readFile,
|
||||
writeFile,
|
||||
readdirRecursive,
|
||||
rmRecursive,
|
||||
unlink
|
||||
} from "./utils/node-fs.mjs";
|
||||
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
|
||||
const DRIVER_NAME = "fs";
|
||||
export default defineDriver((userOptions = {}) => {
|
||||
if (!userOptions.base) {
|
||||
throw createRequiredError(DRIVER_NAME, "base");
|
||||
}
|
||||
const base = resolve(userOptions.base);
|
||||
const ignore = anymatch(
|
||||
userOptions.ignore || ["**/node_modules/**", "**/.git/**"]
|
||||
);
|
||||
const r = (key) => {
|
||||
if (PATH_TRAVERSE_RE.test(key)) {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
`Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`
|
||||
);
|
||||
}
|
||||
const resolved = join(base, key.replace(/:/g, "/"));
|
||||
return resolved;
|
||||
};
|
||||
let _watcher;
|
||||
const _unwatch = async () => {
|
||||
if (_watcher) {
|
||||
await _watcher.close();
|
||||
_watcher = void 0;
|
||||
}
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: userOptions,
|
||||
flags: {
|
||||
maxDepth: true
|
||||
},
|
||||
hasItem(key) {
|
||||
return existsSync(r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return readFile(r(key), "utf8");
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return readFile(r(key));
|
||||
},
|
||||
async getMeta(key) {
|
||||
const { atime, mtime, size, birthtime, ctime } = await fsp.stat(r(key)).catch(() => ({}));
|
||||
return { atime, mtime, size, birthtime, ctime };
|
||||
},
|
||||
setItem(key, value) {
|
||||
if (userOptions.readOnly) {
|
||||
return;
|
||||
}
|
||||
return writeFile(r(key), value, "utf8");
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
if (userOptions.readOnly) {
|
||||
return;
|
||||
}
|
||||
return writeFile(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
if (userOptions.readOnly) {
|
||||
return;
|
||||
}
|
||||
return unlink(r(key));
|
||||
},
|
||||
getKeys(_base, topts) {
|
||||
return readdirRecursive(r("."), ignore, topts?.maxDepth);
|
||||
},
|
||||
async clear() {
|
||||
if (userOptions.readOnly || userOptions.noClear) {
|
||||
return;
|
||||
}
|
||||
await rmRecursive(r("."));
|
||||
},
|
||||
async dispose() {
|
||||
if (_watcher) {
|
||||
await _watcher.close();
|
||||
}
|
||||
},
|
||||
async watch(callback) {
|
||||
if (_watcher) {
|
||||
return _unwatch;
|
||||
}
|
||||
await new Promise((resolve2, reject) => {
|
||||
const watchOptions = {
|
||||
ignoreInitial: true,
|
||||
...userOptions.watchOptions
|
||||
};
|
||||
if (!watchOptions.ignored) {
|
||||
watchOptions.ignored = [];
|
||||
} else if (Array.isArray(watchOptions.ignored)) {
|
||||
watchOptions.ignored = [...watchOptions.ignored];
|
||||
} else {
|
||||
watchOptions.ignored = [watchOptions.ignored];
|
||||
}
|
||||
watchOptions.ignored.push(ignore);
|
||||
_watcher = watch(base, watchOptions).on("ready", () => {
|
||||
resolve2();
|
||||
}).on("error", reject).on("all", (eventName, path) => {
|
||||
path = relative(base, path);
|
||||
if (eventName === "change" || eventName === "add") {
|
||||
callback("update", path);
|
||||
} else if (eventName === "unlink") {
|
||||
callback("remove", path);
|
||||
}
|
||||
});
|
||||
});
|
||||
return _unwatch;
|
||||
}
|
||||
};
|
||||
});
|
114
node_modules/unstorage/drivers/github.cjs
generated
vendored
Normal file
114
node_modules/unstorage/drivers/github.cjs
generated
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _ofetch = require("ofetch");
|
||||
var _ufo = require("ufo");
|
||||
const defaultOptions = {
|
||||
repo: "",
|
||||
branch: "main",
|
||||
ttl: 600,
|
||||
dir: "",
|
||||
apiURL: "https://api.github.com",
|
||||
cdnURL: "https://raw.githubusercontent.com"
|
||||
};
|
||||
const DRIVER_NAME = "github";
|
||||
module.exports = (0, _utils.defineDriver)(_opts => {
|
||||
const opts = {
|
||||
...defaultOptions,
|
||||
..._opts
|
||||
};
|
||||
const rawUrl = (0, _ufo.joinURL)(opts.cdnURL, opts.repo, opts.branch, opts.dir);
|
||||
let files = {};
|
||||
let lastCheck = 0;
|
||||
let syncPromise;
|
||||
const syncFiles = async () => {
|
||||
if (!opts.repo) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "repo");
|
||||
}
|
||||
if (lastCheck + opts.ttl * 1e3 > Date.now()) {
|
||||
return;
|
||||
}
|
||||
if (!syncPromise) {
|
||||
syncPromise = fetchFiles(opts);
|
||||
}
|
||||
files = await syncPromise;
|
||||
lastCheck = Date.now();
|
||||
syncPromise = void 0;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
async getKeys() {
|
||||
await syncFiles();
|
||||
return Object.keys(files);
|
||||
},
|
||||
async hasItem(key) {
|
||||
await syncFiles();
|
||||
return key in files;
|
||||
},
|
||||
async getItem(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
if (!item) {
|
||||
return null;
|
||||
}
|
||||
if (!item.body) {
|
||||
try {
|
||||
item.body = await (0, _ofetch.$fetch)(key.replace(/:/g, "/"), {
|
||||
baseURL: rawUrl,
|
||||
headers: opts.token ? {
|
||||
Authorization: `token ${opts.token}`
|
||||
} : void 0
|
||||
});
|
||||
} catch (error) {
|
||||
throw (0, _utils.createError)("github", `Failed to fetch \`${JSON.stringify(key)}\``, {
|
||||
cause: error
|
||||
});
|
||||
}
|
||||
}
|
||||
return item.body;
|
||||
},
|
||||
async getMeta(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
return item ? item.meta : null;
|
||||
}
|
||||
};
|
||||
});
|
||||
async function fetchFiles(opts) {
|
||||
const prefix = (0, _ufo.withTrailingSlash)(opts.dir).replace(/^\//, "");
|
||||
const files = {};
|
||||
try {
|
||||
const trees = await (0, _ofetch.$fetch)(`/repos/${opts.repo}/git/trees/${opts.branch}?recursive=1`, {
|
||||
baseURL: opts.apiURL,
|
||||
headers: {
|
||||
"User-Agent": "unstorage",
|
||||
...(opts.token && {
|
||||
Authorization: `token ${opts.token}`
|
||||
})
|
||||
}
|
||||
});
|
||||
for (const node of trees.tree) {
|
||||
if (node.type !== "blob" || !node.path.startsWith(prefix)) {
|
||||
continue;
|
||||
}
|
||||
const key = node.path.slice(prefix.length).replace(/\//g, ":");
|
||||
files[key] = {
|
||||
meta: {
|
||||
sha: node.sha,
|
||||
mode: node.mode,
|
||||
size: node.size
|
||||
}
|
||||
};
|
||||
}
|
||||
return files;
|
||||
} catch (error) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, "Failed to fetch git tree", {
|
||||
cause: error
|
||||
});
|
||||
}
|
||||
}
|
34
node_modules/unstorage/drivers/github.d.ts
generated
vendored
Normal file
34
node_modules/unstorage/drivers/github.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
export interface GithubOptions {
|
||||
/**
|
||||
* The name of the repository. (e.g. `username/my-repo`)
|
||||
* Required
|
||||
*/
|
||||
repo: string;
|
||||
/**
|
||||
* The branch to fetch. (e.g. `dev`)
|
||||
* @default "main"
|
||||
*/
|
||||
branch?: string;
|
||||
/**
|
||||
* @default ""
|
||||
*/
|
||||
dir?: string;
|
||||
/**
|
||||
* @default 600
|
||||
*/
|
||||
ttl?: number;
|
||||
/**
|
||||
* Github API token (recommended)
|
||||
*/
|
||||
token?: string;
|
||||
/**
|
||||
* @default "https://api.github.com"
|
||||
*/
|
||||
apiURL?: string;
|
||||
/**
|
||||
* @default "https://raw.githubusercontent.com"
|
||||
*/
|
||||
cdnURL?: string;
|
||||
}
|
||||
declare const _default: (opts: GithubOptions) => import("..").Driver<GithubOptions, never>;
|
||||
export default _default;
|
108
node_modules/unstorage/drivers/github.mjs
generated
vendored
Normal file
108
node_modules/unstorage/drivers/github.mjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
|
||||
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import { $fetch } from "ofetch";
|
||||
import { withTrailingSlash, joinURL } from "ufo";
|
||||
const defaultOptions = {
|
||||
repo: "",
|
||||
branch: "main",
|
||||
ttl: 600,
|
||||
dir: "",
|
||||
apiURL: "https://api.github.com",
|
||||
cdnURL: "https://raw.githubusercontent.com"
|
||||
};
|
||||
const DRIVER_NAME = "github";
|
||||
export default defineDriver((_opts) => {
|
||||
const opts = { ...defaultOptions, ..._opts };
|
||||
const rawUrl = joinURL(opts.cdnURL, opts.repo, opts.branch, opts.dir);
|
||||
let files = {};
|
||||
let lastCheck = 0;
|
||||
let syncPromise;
|
||||
const syncFiles = async () => {
|
||||
if (!opts.repo) {
|
||||
throw createRequiredError(DRIVER_NAME, "repo");
|
||||
}
|
||||
if (lastCheck + opts.ttl * 1e3 > Date.now()) {
|
||||
return;
|
||||
}
|
||||
if (!syncPromise) {
|
||||
syncPromise = fetchFiles(opts);
|
||||
}
|
||||
files = await syncPromise;
|
||||
lastCheck = Date.now();
|
||||
syncPromise = void 0;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
async getKeys() {
|
||||
await syncFiles();
|
||||
return Object.keys(files);
|
||||
},
|
||||
async hasItem(key) {
|
||||
await syncFiles();
|
||||
return key in files;
|
||||
},
|
||||
async getItem(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
if (!item) {
|
||||
return null;
|
||||
}
|
||||
if (!item.body) {
|
||||
try {
|
||||
item.body = await $fetch(key.replace(/:/g, "/"), {
|
||||
baseURL: rawUrl,
|
||||
headers: opts.token ? {
|
||||
Authorization: `token ${opts.token}`
|
||||
} : void 0
|
||||
});
|
||||
} catch (error) {
|
||||
throw createError(
|
||||
"github",
|
||||
`Failed to fetch \`${JSON.stringify(key)}\``,
|
||||
{ cause: error }
|
||||
);
|
||||
}
|
||||
}
|
||||
return item.body;
|
||||
},
|
||||
async getMeta(key) {
|
||||
await syncFiles();
|
||||
const item = files[key];
|
||||
return item ? item.meta : null;
|
||||
}
|
||||
};
|
||||
});
|
||||
async function fetchFiles(opts) {
|
||||
const prefix = withTrailingSlash(opts.dir).replace(/^\//, "");
|
||||
const files = {};
|
||||
try {
|
||||
const trees = await $fetch(
|
||||
`/repos/${opts.repo}/git/trees/${opts.branch}?recursive=1`,
|
||||
{
|
||||
baseURL: opts.apiURL,
|
||||
headers: {
|
||||
"User-Agent": "unstorage",
|
||||
...opts.token && { Authorization: `token ${opts.token}` }
|
||||
}
|
||||
}
|
||||
);
|
||||
for (const node of trees.tree) {
|
||||
if (node.type !== "blob" || !node.path.startsWith(prefix)) {
|
||||
continue;
|
||||
}
|
||||
const key = node.path.slice(prefix.length).replace(/\//g, ":");
|
||||
files[key] = {
|
||||
meta: {
|
||||
sha: node.sha,
|
||||
mode: node.mode,
|
||||
size: node.size
|
||||
}
|
||||
};
|
||||
}
|
||||
return files;
|
||||
} catch (error) {
|
||||
throw createError(DRIVER_NAME, "Failed to fetch git tree", {
|
||||
cause: error
|
||||
});
|
||||
}
|
||||
}
|
111
node_modules/unstorage/drivers/http.cjs
generated
vendored
Normal file
111
node_modules/unstorage/drivers/http.cjs
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _ofetch = require("ofetch");
|
||||
var _ufo = require("ufo");
|
||||
const DRIVER_NAME = "http";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
const r = (key = "") => (0, _ufo.joinURL)(opts.base, key.replace(/:/g, "/"));
|
||||
const rBase = (key = "") => (0, _ufo.joinURL)(opts.base, (key || "/").replace(/:/g, "/"), ":");
|
||||
const catchFetchError = (error, fallbackVal = null) => {
|
||||
if (error?.response?.status === 404) {
|
||||
return fallbackVal;
|
||||
}
|
||||
throw error;
|
||||
};
|
||||
const getHeaders = (topts, defaultHeaders) => {
|
||||
const headers = {
|
||||
...defaultHeaders,
|
||||
...opts.headers,
|
||||
...topts?.headers
|
||||
};
|
||||
if (topts?.ttl && !headers["x-ttl"]) {
|
||||
headers["x-ttl"] = topts.ttl + "";
|
||||
}
|
||||
return headers;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
hasItem(key, topts) {
|
||||
return (0, _ofetch.$fetch)(r(key), {
|
||||
method: "HEAD",
|
||||
headers: getHeaders(topts)
|
||||
}).then(() => true).catch(err => catchFetchError(err, false));
|
||||
},
|
||||
async getItem(key, tops) {
|
||||
const value = await (0, _ofetch.$fetch)(r(key), {
|
||||
headers: getHeaders(tops)
|
||||
}).catch(catchFetchError);
|
||||
return value;
|
||||
},
|
||||
async getItemRaw(key, topts) {
|
||||
const response = await _ofetch.$fetch.raw(r(key), {
|
||||
responseType: "arrayBuffer",
|
||||
headers: getHeaders(topts, {
|
||||
accept: "application/octet-stream"
|
||||
})
|
||||
}).catch(catchFetchError);
|
||||
return response._data;
|
||||
},
|
||||
async getMeta(key, topts) {
|
||||
const res = await _ofetch.$fetch.raw(r(key), {
|
||||
method: "HEAD",
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
let mtime = void 0;
|
||||
let ttl = void 0;
|
||||
const _lastModified = res.headers.get("last-modified");
|
||||
if (_lastModified) {
|
||||
mtime = new Date(_lastModified);
|
||||
}
|
||||
const _ttl = res.headers.get("x-ttl");
|
||||
if (_ttl) {
|
||||
ttl = Number.parseInt(_ttl, 10);
|
||||
}
|
||||
return {
|
||||
status: res.status,
|
||||
mtime,
|
||||
ttl
|
||||
};
|
||||
},
|
||||
async setItem(key, value, topts) {
|
||||
await (0, _ofetch.$fetch)(r(key), {
|
||||
method: "PUT",
|
||||
body: value,
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
},
|
||||
async setItemRaw(key, value, topts) {
|
||||
await (0, _ofetch.$fetch)(r(key), {
|
||||
method: "PUT",
|
||||
body: value,
|
||||
headers: getHeaders(topts, {
|
||||
"content-type": "application/octet-stream"
|
||||
})
|
||||
});
|
||||
},
|
||||
async removeItem(key, topts) {
|
||||
await (0, _ofetch.$fetch)(r(key), {
|
||||
method: "DELETE",
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
},
|
||||
async getKeys(base, topts) {
|
||||
const value = await (0, _ofetch.$fetch)(rBase(base), {
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
return Array.isArray(value) ? value : [];
|
||||
},
|
||||
async clear(base, topts) {
|
||||
await (0, _ofetch.$fetch)(rBase(base), {
|
||||
method: "DELETE",
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
6
node_modules/unstorage/drivers/http.d.ts
generated
vendored
Normal file
6
node_modules/unstorage/drivers/http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
export interface HTTPOptions {
|
||||
base: string;
|
||||
headers?: Record<string, string>;
|
||||
}
|
||||
declare const _default: (opts: HTTPOptions) => import("..").Driver<HTTPOptions, never>;
|
||||
export default _default;
|
103
node_modules/unstorage/drivers/http.mjs
generated
vendored
Normal file
103
node_modules/unstorage/drivers/http.mjs
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import { $fetch as _fetch } from "ofetch";
|
||||
import { joinURL } from "ufo";
|
||||
const DRIVER_NAME = "http";
|
||||
export default defineDriver((opts) => {
|
||||
const r = (key = "") => joinURL(opts.base, key.replace(/:/g, "/"));
|
||||
const rBase = (key = "") => joinURL(opts.base, (key || "/").replace(/:/g, "/"), ":");
|
||||
const catchFetchError = (error, fallbackVal = null) => {
|
||||
if (error?.response?.status === 404) {
|
||||
return fallbackVal;
|
||||
}
|
||||
throw error;
|
||||
};
|
||||
const getHeaders = (topts, defaultHeaders) => {
|
||||
const headers = {
|
||||
...defaultHeaders,
|
||||
...opts.headers,
|
||||
...topts?.headers
|
||||
};
|
||||
if (topts?.ttl && !headers["x-ttl"]) {
|
||||
headers["x-ttl"] = topts.ttl + "";
|
||||
}
|
||||
return headers;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
hasItem(key, topts) {
|
||||
return _fetch(r(key), {
|
||||
method: "HEAD",
|
||||
headers: getHeaders(topts)
|
||||
}).then(() => true).catch((err) => catchFetchError(err, false));
|
||||
},
|
||||
async getItem(key, tops) {
|
||||
const value = await _fetch(r(key), {
|
||||
headers: getHeaders(tops)
|
||||
}).catch(catchFetchError);
|
||||
return value;
|
||||
},
|
||||
async getItemRaw(key, topts) {
|
||||
const response = await _fetch.raw(r(key), {
|
||||
responseType: "arrayBuffer",
|
||||
headers: getHeaders(topts, { accept: "application/octet-stream" })
|
||||
}).catch(catchFetchError);
|
||||
return response._data;
|
||||
},
|
||||
async getMeta(key, topts) {
|
||||
const res = await _fetch.raw(r(key), {
|
||||
method: "HEAD",
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
let mtime = void 0;
|
||||
let ttl = void 0;
|
||||
const _lastModified = res.headers.get("last-modified");
|
||||
if (_lastModified) {
|
||||
mtime = new Date(_lastModified);
|
||||
}
|
||||
const _ttl = res.headers.get("x-ttl");
|
||||
if (_ttl) {
|
||||
ttl = Number.parseInt(_ttl, 10);
|
||||
}
|
||||
return {
|
||||
status: res.status,
|
||||
mtime,
|
||||
ttl
|
||||
};
|
||||
},
|
||||
async setItem(key, value, topts) {
|
||||
await _fetch(r(key), {
|
||||
method: "PUT",
|
||||
body: value,
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
},
|
||||
async setItemRaw(key, value, topts) {
|
||||
await _fetch(r(key), {
|
||||
method: "PUT",
|
||||
body: value,
|
||||
headers: getHeaders(topts, {
|
||||
"content-type": "application/octet-stream"
|
||||
})
|
||||
});
|
||||
},
|
||||
async removeItem(key, topts) {
|
||||
await _fetch(r(key), {
|
||||
method: "DELETE",
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
},
|
||||
async getKeys(base, topts) {
|
||||
const value = await _fetch(rBase(base), {
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
return Array.isArray(value) ? value : [];
|
||||
},
|
||||
async clear(base, topts) {
|
||||
await _fetch(rBase(base), {
|
||||
method: "DELETE",
|
||||
headers: getHeaders(topts)
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
48
node_modules/unstorage/drivers/indexedb.cjs
generated
vendored
Normal file
48
node_modules/unstorage/drivers/indexedb.cjs
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _idbKeyval = require("idb-keyval");
|
||||
const DRIVER_NAME = "idb-keyval";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const base = opts.base && opts.base.length > 0 ? `${opts.base}:` : "";
|
||||
const makeKey = key => base + key;
|
||||
let customStore;
|
||||
if (opts.dbName && opts.storeName) {
|
||||
customStore = (0, _idbKeyval.createStore)(opts.dbName, opts.storeName);
|
||||
}
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
async hasItem(key) {
|
||||
const item = await (0, _idbKeyval.get)(makeKey(key), customStore);
|
||||
return item === void 0 ? false : true;
|
||||
},
|
||||
async getItem(key) {
|
||||
const item = await (0, _idbKeyval.get)(makeKey(key), customStore);
|
||||
return item ?? null;
|
||||
},
|
||||
async getItemRaw(key) {
|
||||
const item = await (0, _idbKeyval.get)(makeKey(key), customStore);
|
||||
return item ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
return (0, _idbKeyval.set)(makeKey(key), value, customStore);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
return (0, _idbKeyval.set)(makeKey(key), value, customStore);
|
||||
},
|
||||
removeItem(key) {
|
||||
return (0, _idbKeyval.del)(makeKey(key), customStore);
|
||||
},
|
||||
getKeys() {
|
||||
return (0, _idbKeyval.keys)(customStore);
|
||||
},
|
||||
clear() {
|
||||
return (0, _idbKeyval.clear)(customStore);
|
||||
}
|
||||
};
|
||||
});
|
7
node_modules/unstorage/drivers/indexedb.d.ts
generated
vendored
Normal file
7
node_modules/unstorage/drivers/indexedb.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
export interface IDBKeyvalOptions {
|
||||
base?: string;
|
||||
dbName?: string;
|
||||
storeName?: string;
|
||||
}
|
||||
declare const _default: (opts: IDBKeyvalOptions | undefined) => import("..").Driver<IDBKeyvalOptions | undefined, never>;
|
||||
export default _default;
|
49
node_modules/unstorage/drivers/indexedb.mjs
generated
vendored
Normal file
49
node_modules/unstorage/drivers/indexedb.mjs
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import {
|
||||
get,
|
||||
set,
|
||||
clear,
|
||||
del,
|
||||
keys,
|
||||
createStore
|
||||
} from "idb-keyval";
|
||||
const DRIVER_NAME = "idb-keyval";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const base = opts.base && opts.base.length > 0 ? `${opts.base}:` : "";
|
||||
const makeKey = (key) => base + key;
|
||||
let customStore;
|
||||
if (opts.dbName && opts.storeName) {
|
||||
customStore = createStore(opts.dbName, opts.storeName);
|
||||
}
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
async hasItem(key) {
|
||||
const item = await get(makeKey(key), customStore);
|
||||
return item === void 0 ? false : true;
|
||||
},
|
||||
async getItem(key) {
|
||||
const item = await get(makeKey(key), customStore);
|
||||
return item ?? null;
|
||||
},
|
||||
async getItemRaw(key) {
|
||||
const item = await get(makeKey(key), customStore);
|
||||
return item ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
return set(makeKey(key), value, customStore);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
return set(makeKey(key), value, customStore);
|
||||
},
|
||||
removeItem(key) {
|
||||
return del(makeKey(key), customStore);
|
||||
},
|
||||
getKeys() {
|
||||
return keys(customStore);
|
||||
},
|
||||
clear() {
|
||||
return clear(customStore);
|
||||
}
|
||||
};
|
||||
});
|
73
node_modules/unstorage/drivers/localstorage.cjs
generated
vendored
Normal file
73
node_modules/unstorage/drivers/localstorage.cjs
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "localstorage";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const storage = opts.storage || opts.localStorage || opts.sessionStorage || (opts.window || globalThis.window)?.[opts.windowKey || "localStorage"];
|
||||
if (!storage) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "localStorage");
|
||||
}
|
||||
const base = opts.base ? (0, _utils.normalizeKey)(opts.base) : "";
|
||||
const r = key => (base ? `${base}:` : "") + key;
|
||||
let _storageListener;
|
||||
const _unwatch = () => {
|
||||
if (_storageListener) {
|
||||
opts.window?.removeEventListener("storage", _storageListener);
|
||||
}
|
||||
_storageListener = void 0;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => storage,
|
||||
hasItem(key) {
|
||||
return Object.prototype.hasOwnProperty.call(storage, r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return storage.getItem(r(key));
|
||||
},
|
||||
setItem(key, value) {
|
||||
return storage.setItem(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
return storage.removeItem(r(key));
|
||||
},
|
||||
getKeys() {
|
||||
const allKeys = Object.keys(storage);
|
||||
return base ? allKeys.filter(key => key.startsWith(`${base}:`)).map(key => key.slice(base.length + 1)) : allKeys;
|
||||
},
|
||||
clear(prefix) {
|
||||
const _base = [base, prefix].filter(Boolean).join(":");
|
||||
if (_base) {
|
||||
for (const key of Object.keys(storage)) {
|
||||
if (key.startsWith(`${_base}:`)) {
|
||||
storage?.removeItem(key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
storage.clear();
|
||||
}
|
||||
},
|
||||
dispose() {
|
||||
if (opts.window && _storageListener) {
|
||||
opts.window.removeEventListener("storage", _storageListener);
|
||||
}
|
||||
},
|
||||
watch(callback) {
|
||||
if (!opts.window) {
|
||||
return _unwatch;
|
||||
}
|
||||
_storageListener = ev => {
|
||||
if (ev.key) {
|
||||
callback(ev.newValue ? "update" : "remove", ev.key);
|
||||
}
|
||||
};
|
||||
opts.window.addEventListener("storage", _storageListener);
|
||||
return _unwatch;
|
||||
}
|
||||
};
|
||||
});
|
12
node_modules/unstorage/drivers/localstorage.d.ts
generated
vendored
Normal file
12
node_modules/unstorage/drivers/localstorage.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
export interface LocalStorageOptions {
|
||||
base?: string;
|
||||
window?: typeof window;
|
||||
windowKey?: "localStorage" | "sessionStorage";
|
||||
storage?: typeof window.localStorage | typeof window.sessionStorage;
|
||||
/** @deprecated use `storage` option */
|
||||
sessionStorage?: typeof window.sessionStorage;
|
||||
/** @deprecated use `storage` option */
|
||||
localStorage?: typeof window.localStorage;
|
||||
}
|
||||
declare const _default: (opts: LocalStorageOptions | undefined) => import("..").Driver<LocalStorageOptions | undefined, Storage>;
|
||||
export default _default;
|
67
node_modules/unstorage/drivers/localstorage.mjs
generated
vendored
Normal file
67
node_modules/unstorage/drivers/localstorage.mjs
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
import { createRequiredError, defineDriver, normalizeKey } from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "localstorage";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const storage = opts.storage || opts.localStorage || opts.sessionStorage || (opts.window || globalThis.window)?.[opts.windowKey || "localStorage"];
|
||||
if (!storage) {
|
||||
throw createRequiredError(DRIVER_NAME, "localStorage");
|
||||
}
|
||||
const base = opts.base ? normalizeKey(opts.base) : "";
|
||||
const r = (key) => (base ? `${base}:` : "") + key;
|
||||
let _storageListener;
|
||||
const _unwatch = () => {
|
||||
if (_storageListener) {
|
||||
opts.window?.removeEventListener("storage", _storageListener);
|
||||
}
|
||||
_storageListener = void 0;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => storage,
|
||||
hasItem(key) {
|
||||
return Object.prototype.hasOwnProperty.call(storage, r(key));
|
||||
},
|
||||
getItem(key) {
|
||||
return storage.getItem(r(key));
|
||||
},
|
||||
setItem(key, value) {
|
||||
return storage.setItem(r(key), value);
|
||||
},
|
||||
removeItem(key) {
|
||||
return storage.removeItem(r(key));
|
||||
},
|
||||
getKeys() {
|
||||
const allKeys = Object.keys(storage);
|
||||
return base ? allKeys.filter((key) => key.startsWith(`${base}:`)).map((key) => key.slice(base.length + 1)) : allKeys;
|
||||
},
|
||||
clear(prefix) {
|
||||
const _base = [base, prefix].filter(Boolean).join(":");
|
||||
if (_base) {
|
||||
for (const key of Object.keys(storage)) {
|
||||
if (key.startsWith(`${_base}:`)) {
|
||||
storage?.removeItem(key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
storage.clear();
|
||||
}
|
||||
},
|
||||
dispose() {
|
||||
if (opts.window && _storageListener) {
|
||||
opts.window.removeEventListener("storage", _storageListener);
|
||||
}
|
||||
},
|
||||
watch(callback) {
|
||||
if (!opts.window) {
|
||||
return _unwatch;
|
||||
}
|
||||
_storageListener = (ev) => {
|
||||
if (ev.key) {
|
||||
callback(ev.newValue ? "update" : "remove", ev.key);
|
||||
}
|
||||
};
|
||||
opts.window.addEventListener("storage", _storageListener);
|
||||
return _unwatch;
|
||||
}
|
||||
};
|
||||
});
|
61
node_modules/unstorage/drivers/lru-cache.cjs
generated
vendored
Normal file
61
node_modules/unstorage/drivers/lru-cache.cjs
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _lruCache = require("lru-cache");
|
||||
const DRIVER_NAME = "lru-cache";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
const cache = new _lruCache.LRUCache({
|
||||
max: 1e3,
|
||||
sizeCalculation: opts.maxSize || opts.maxEntrySize ? (value, key) => {
|
||||
return key.length + byteLength(value);
|
||||
} : void 0,
|
||||
...opts
|
||||
});
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => cache,
|
||||
hasItem(key) {
|
||||
return cache.has(key);
|
||||
},
|
||||
getItem(key) {
|
||||
return cache.get(key) ?? null;
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return cache.get(key) ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
cache.set(key, value);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
cache.set(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
cache.delete(key);
|
||||
},
|
||||
getKeys() {
|
||||
return [...cache.keys()];
|
||||
},
|
||||
clear() {
|
||||
cache.clear();
|
||||
},
|
||||
dispose() {
|
||||
cache.clear();
|
||||
}
|
||||
};
|
||||
});
|
||||
function byteLength(value) {
|
||||
if (typeof Buffer !== "undefined") {
|
||||
try {
|
||||
return Buffer.byteLength(value);
|
||||
} catch {}
|
||||
}
|
||||
try {
|
||||
return typeof value === "string" ? value.length : JSON.stringify(value).length;
|
||||
} catch {}
|
||||
return 0;
|
||||
}
|
6
node_modules/unstorage/drivers/lru-cache.d.ts
generated
vendored
Normal file
6
node_modules/unstorage/drivers/lru-cache.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import { LRUCache } from "lru-cache";
|
||||
type LRUCacheOptions = LRUCache.OptionsBase<string, any, any> & Partial<LRUCache.OptionsMaxLimit<string, any, any>> & Partial<LRUCache.OptionsSizeLimit<string, any, any>> & Partial<LRUCache.OptionsTTLLimit<string, any, any>>;
|
||||
export interface LRUDriverOptions extends LRUCacheOptions {
|
||||
}
|
||||
declare const _default: (opts: LRUDriverOptions | undefined) => import("..").Driver<LRUDriverOptions | undefined, LRUCache<string, any, any>>;
|
||||
export default _default;
|
57
node_modules/unstorage/drivers/lru-cache.mjs
generated
vendored
Normal file
57
node_modules/unstorage/drivers/lru-cache.mjs
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import { LRUCache } from "lru-cache";
|
||||
const DRIVER_NAME = "lru-cache";
|
||||
export default defineDriver((opts = {}) => {
|
||||
const cache = new LRUCache({
|
||||
max: 1e3,
|
||||
sizeCalculation: opts.maxSize || opts.maxEntrySize ? (value, key) => {
|
||||
return key.length + byteLength(value);
|
||||
} : void 0,
|
||||
...opts
|
||||
});
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: () => cache,
|
||||
hasItem(key) {
|
||||
return cache.has(key);
|
||||
},
|
||||
getItem(key) {
|
||||
return cache.get(key) ?? null;
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return cache.get(key) ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
cache.set(key, value);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
cache.set(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
cache.delete(key);
|
||||
},
|
||||
getKeys() {
|
||||
return [...cache.keys()];
|
||||
},
|
||||
clear() {
|
||||
cache.clear();
|
||||
},
|
||||
dispose() {
|
||||
cache.clear();
|
||||
}
|
||||
};
|
||||
});
|
||||
function byteLength(value) {
|
||||
if (typeof Buffer !== "undefined") {
|
||||
try {
|
||||
return Buffer.byteLength(value);
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
try {
|
||||
return typeof value === "string" ? value.length : JSON.stringify(value).length;
|
||||
} catch {
|
||||
}
|
||||
return 0;
|
||||
}
|
42
node_modules/unstorage/drivers/memory.cjs
generated
vendored
Normal file
42
node_modules/unstorage/drivers/memory.cjs
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "memory";
|
||||
module.exports = (0, _utils.defineDriver)(() => {
|
||||
const data = /* @__PURE__ */new Map();
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
getInstance: () => data,
|
||||
hasItem(key) {
|
||||
return data.has(key);
|
||||
},
|
||||
getItem(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
data.delete(key);
|
||||
},
|
||||
getKeys() {
|
||||
return [...data.keys()];
|
||||
},
|
||||
clear() {
|
||||
data.clear();
|
||||
},
|
||||
dispose() {
|
||||
data.clear();
|
||||
}
|
||||
};
|
||||
});
|
2
node_modules/unstorage/drivers/memory.d.ts
generated
vendored
Normal file
2
node_modules/unstorage/drivers/memory.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
declare const _default: (opts: void) => import("..").Driver<void, Map<string, any>>;
|
||||
export default _default;
|
36
node_modules/unstorage/drivers/memory.mjs
generated
vendored
Normal file
36
node_modules/unstorage/drivers/memory.mjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "memory";
|
||||
export default defineDriver(() => {
|
||||
const data = /* @__PURE__ */ new Map();
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
getInstance: () => data,
|
||||
hasItem(key) {
|
||||
return data.has(key);
|
||||
},
|
||||
getItem(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
getItemRaw(key) {
|
||||
return data.get(key) ?? null;
|
||||
},
|
||||
setItem(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
setItemRaw(key, value) {
|
||||
data.set(key, value);
|
||||
},
|
||||
removeItem(key) {
|
||||
data.delete(key);
|
||||
},
|
||||
getKeys() {
|
||||
return [...data.keys()];
|
||||
},
|
||||
clear() {
|
||||
data.clear();
|
||||
},
|
||||
dispose() {
|
||||
data.clear();
|
||||
}
|
||||
};
|
||||
});
|
119
node_modules/unstorage/drivers/mongodb.cjs
generated
vendored
Normal file
119
node_modules/unstorage/drivers/mongodb.cjs
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _mongodb = require("mongodb");
|
||||
const DRIVER_NAME = "mongodb";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
let collection;
|
||||
const getMongoCollection = () => {
|
||||
if (!collection) {
|
||||
if (!opts.connectionString) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "connectionString");
|
||||
}
|
||||
const mongoClient = new _mongodb.MongoClient(opts.connectionString, opts.clientOptions);
|
||||
const db = mongoClient.db(opts.databaseName || "unstorage");
|
||||
collection = db.collection(opts.collectionName || "unstorage");
|
||||
}
|
||||
return collection;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getMongoCollection,
|
||||
async hasItem(key) {
|
||||
const result = await getMongoCollection().findOne({
|
||||
key
|
||||
});
|
||||
return !!result;
|
||||
},
|
||||
async getItem(key) {
|
||||
const document = await getMongoCollection().findOne({
|
||||
key
|
||||
});
|
||||
return document?.value ?? null;
|
||||
},
|
||||
async getItems(items) {
|
||||
const keys = items.map(item => item.key);
|
||||
const result = await getMongoCollection().find({
|
||||
key: {
|
||||
$in: keys
|
||||
}
|
||||
}).toArray();
|
||||
const resultMap = new Map(result.map(doc => [doc.key, doc]));
|
||||
return keys.map(key => {
|
||||
return {
|
||||
key,
|
||||
value: resultMap.get(key)?.value ?? null
|
||||
};
|
||||
});
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const currentDateTime = /* @__PURE__ */new Date();
|
||||
await getMongoCollection().updateOne({
|
||||
key
|
||||
}, {
|
||||
$set: {
|
||||
key,
|
||||
value,
|
||||
modifiedAt: currentDateTime
|
||||
},
|
||||
$setOnInsert: {
|
||||
createdAt: currentDateTime
|
||||
}
|
||||
}, {
|
||||
upsert: true
|
||||
});
|
||||
},
|
||||
async setItems(items) {
|
||||
const currentDateTime = /* @__PURE__ */new Date();
|
||||
const operations = items.map(({
|
||||
key,
|
||||
value
|
||||
}) => ({
|
||||
updateOne: {
|
||||
filter: {
|
||||
key
|
||||
},
|
||||
update: {
|
||||
$set: {
|
||||
key,
|
||||
value,
|
||||
modifiedAt: currentDateTime
|
||||
},
|
||||
$setOnInsert: {
|
||||
createdAt: currentDateTime
|
||||
}
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
}));
|
||||
await getMongoCollection().bulkWrite(operations);
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getMongoCollection().deleteOne({
|
||||
key
|
||||
});
|
||||
},
|
||||
async getKeys() {
|
||||
return await getMongoCollection().find().project({
|
||||
key: true
|
||||
}).map(d => d.key).toArray();
|
||||
},
|
||||
async getMeta(key) {
|
||||
const document = await getMongoCollection().findOne({
|
||||
key
|
||||
});
|
||||
return document ? {
|
||||
mtime: document.modifiedAt,
|
||||
birthtime: document.createdAt
|
||||
} : {};
|
||||
},
|
||||
async clear() {
|
||||
await getMongoCollection().deleteMany({});
|
||||
}
|
||||
};
|
||||
});
|
23
node_modules/unstorage/drivers/mongodb.d.ts
generated
vendored
Normal file
23
node_modules/unstorage/drivers/mongodb.d.ts
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
import { type Collection, type MongoClientOptions } from "mongodb";
|
||||
export interface MongoDbOptions {
|
||||
/**
|
||||
* The MongoDB connection string.
|
||||
*/
|
||||
connectionString: string;
|
||||
/**
|
||||
* Optional configuration settings for the MongoClient instance.
|
||||
*/
|
||||
clientOptions?: MongoClientOptions;
|
||||
/**
|
||||
* The name of the database to use.
|
||||
* @default "unstorage"
|
||||
*/
|
||||
databaseName?: string;
|
||||
/**
|
||||
* The name of the collection to use.
|
||||
* @default "unstorage"
|
||||
*/
|
||||
collectionName?: string;
|
||||
}
|
||||
declare const _default: (opts: MongoDbOptions) => import("..").Driver<MongoDbOptions, Collection<import("mongodb").Document>>;
|
||||
export default _default;
|
82
node_modules/unstorage/drivers/mongodb.mjs
generated
vendored
Normal file
82
node_modules/unstorage/drivers/mongodb.mjs
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
import { createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import { MongoClient } from "mongodb";
|
||||
const DRIVER_NAME = "mongodb";
|
||||
export default defineDriver((opts) => {
|
||||
let collection;
|
||||
const getMongoCollection = () => {
|
||||
if (!collection) {
|
||||
if (!opts.connectionString) {
|
||||
throw createRequiredError(DRIVER_NAME, "connectionString");
|
||||
}
|
||||
const mongoClient = new MongoClient(
|
||||
opts.connectionString,
|
||||
opts.clientOptions
|
||||
);
|
||||
const db = mongoClient.db(opts.databaseName || "unstorage");
|
||||
collection = db.collection(opts.collectionName || "unstorage");
|
||||
}
|
||||
return collection;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getMongoCollection,
|
||||
async hasItem(key) {
|
||||
const result = await getMongoCollection().findOne({ key });
|
||||
return !!result;
|
||||
},
|
||||
async getItem(key) {
|
||||
const document = await getMongoCollection().findOne({ key });
|
||||
return document?.value ?? null;
|
||||
},
|
||||
async getItems(items) {
|
||||
const keys = items.map((item) => item.key);
|
||||
const result = await getMongoCollection().find({ key: { $in: keys } }).toArray();
|
||||
const resultMap = new Map(result.map((doc) => [doc.key, doc]));
|
||||
return keys.map((key) => {
|
||||
return { key, value: resultMap.get(key)?.value ?? null };
|
||||
});
|
||||
},
|
||||
async setItem(key, value) {
|
||||
const currentDateTime = /* @__PURE__ */ new Date();
|
||||
await getMongoCollection().updateOne(
|
||||
{ key },
|
||||
{
|
||||
$set: { key, value, modifiedAt: currentDateTime },
|
||||
$setOnInsert: { createdAt: currentDateTime }
|
||||
},
|
||||
{ upsert: true }
|
||||
);
|
||||
},
|
||||
async setItems(items) {
|
||||
const currentDateTime = /* @__PURE__ */ new Date();
|
||||
const operations = items.map(({ key, value }) => ({
|
||||
updateOne: {
|
||||
filter: { key },
|
||||
update: {
|
||||
$set: { key, value, modifiedAt: currentDateTime },
|
||||
$setOnInsert: { createdAt: currentDateTime }
|
||||
},
|
||||
upsert: true
|
||||
}
|
||||
}));
|
||||
await getMongoCollection().bulkWrite(operations);
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getMongoCollection().deleteOne({ key });
|
||||
},
|
||||
async getKeys() {
|
||||
return await getMongoCollection().find().project({ key: true }).map((d) => d.key).toArray();
|
||||
},
|
||||
async getMeta(key) {
|
||||
const document = await getMongoCollection().findOne({ key });
|
||||
return document ? {
|
||||
mtime: document.modifiedAt,
|
||||
birthtime: document.createdAt
|
||||
} : {};
|
||||
},
|
||||
async clear() {
|
||||
await getMongoCollection().deleteMany({});
|
||||
}
|
||||
};
|
||||
});
|
81
node_modules/unstorage/drivers/netlify-blobs.cjs
generated
vendored
Normal file
81
node_modules/unstorage/drivers/netlify-blobs.cjs
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _blobs = require("@netlify/blobs");
|
||||
var _ofetch = require("ofetch");
|
||||
const DRIVER_NAME = "netlify-blobs";
|
||||
module.exports = (0, _utils.defineDriver)(options => {
|
||||
const {
|
||||
deployScoped,
|
||||
name,
|
||||
...opts
|
||||
} = options;
|
||||
let store;
|
||||
const getClient = () => {
|
||||
if (!store) {
|
||||
if (deployScoped) {
|
||||
if (name) {
|
||||
throw (0, _utils.createError)(DRIVER_NAME, "deploy-scoped stores cannot have a name");
|
||||
}
|
||||
store = (0, _blobs.getDeployStore)({
|
||||
fetch: _ofetch.fetch,
|
||||
...options
|
||||
});
|
||||
} else {
|
||||
if (!name) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "name");
|
||||
}
|
||||
store = (0, _blobs.getStore)({
|
||||
name: encodeURIComponent(name),
|
||||
fetch: _ofetch.fetch,
|
||||
...opts
|
||||
});
|
||||
}
|
||||
}
|
||||
return store;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options,
|
||||
getInstance: getClient,
|
||||
async hasItem(key) {
|
||||
return getClient().getMetadata(key).then(Boolean);
|
||||
},
|
||||
getItem: (key, tops) => {
|
||||
return getClient().get(key, tops);
|
||||
},
|
||||
getMeta(key) {
|
||||
return getClient().getMetadata(key);
|
||||
},
|
||||
getItemRaw(key, topts) {
|
||||
return getClient().get(key, {
|
||||
type: topts?.type ?? "arrayBuffer"
|
||||
});
|
||||
},
|
||||
async setItem(key, value, topts) {
|
||||
await getClient().set(key, value, topts);
|
||||
},
|
||||
async setItemRaw(key, value, topts) {
|
||||
await getClient().set(key, value, topts);
|
||||
},
|
||||
removeItem(key) {
|
||||
return getClient().delete(key);
|
||||
},
|
||||
async getKeys(base, tops) {
|
||||
return (await getClient().list({
|
||||
...tops,
|
||||
prefix: base
|
||||
})).blobs.map(item => item.key);
|
||||
},
|
||||
async clear(base) {
|
||||
const client = getClient();
|
||||
return Promise.allSettled((await client.list({
|
||||
prefix: base
|
||||
})).blobs.map(item => client.delete(item.key))).then(() => {});
|
||||
}
|
||||
};
|
||||
});
|
19
node_modules/unstorage/drivers/netlify-blobs.d.ts
generated
vendored
Normal file
19
node_modules/unstorage/drivers/netlify-blobs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import type { Store, GetStoreOptions, GetDeployStoreOptions } from "@netlify/blobs";
|
||||
export type NetlifyStoreOptions = NetlifyDeployStoreLegacyOptions | NetlifyDeployStoreOptions | NetlifyNamedStoreOptions;
|
||||
export interface ExtraOptions {
|
||||
/** If set to `true`, the store is scoped to the deploy. This means that it is only available from that deploy, and will be deleted or rolled-back alongside it. */
|
||||
deployScoped?: boolean;
|
||||
}
|
||||
export interface NetlifyDeployStoreOptions extends GetDeployStoreOptions, ExtraOptions {
|
||||
name?: never;
|
||||
deployScoped: true;
|
||||
}
|
||||
export interface NetlifyDeployStoreLegacyOptions extends NetlifyDeployStoreOptions {
|
||||
region?: never;
|
||||
}
|
||||
export interface NetlifyNamedStoreOptions extends GetStoreOptions, ExtraOptions {
|
||||
name: string;
|
||||
deployScoped?: false;
|
||||
}
|
||||
declare const _default: (opts: NetlifyStoreOptions) => import("..").Driver<NetlifyStoreOptions, Store>;
|
||||
export default _default;
|
67
node_modules/unstorage/drivers/netlify-blobs.mjs
generated
vendored
Normal file
67
node_modules/unstorage/drivers/netlify-blobs.mjs
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import { getStore, getDeployStore } from "@netlify/blobs";
|
||||
import { fetch } from "ofetch";
|
||||
const DRIVER_NAME = "netlify-blobs";
|
||||
export default defineDriver((options) => {
|
||||
const { deployScoped, name, ...opts } = options;
|
||||
let store;
|
||||
const getClient = () => {
|
||||
if (!store) {
|
||||
if (deployScoped) {
|
||||
if (name) {
|
||||
throw createError(
|
||||
DRIVER_NAME,
|
||||
"deploy-scoped stores cannot have a name"
|
||||
);
|
||||
}
|
||||
store = getDeployStore({ fetch, ...options });
|
||||
} else {
|
||||
if (!name) {
|
||||
throw createRequiredError(DRIVER_NAME, "name");
|
||||
}
|
||||
store = getStore({ name: encodeURIComponent(name), fetch, ...opts });
|
||||
}
|
||||
}
|
||||
return store;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options,
|
||||
getInstance: getClient,
|
||||
async hasItem(key) {
|
||||
return getClient().getMetadata(key).then(Boolean);
|
||||
},
|
||||
getItem: (key, tops) => {
|
||||
return getClient().get(key, tops);
|
||||
},
|
||||
getMeta(key) {
|
||||
return getClient().getMetadata(key);
|
||||
},
|
||||
getItemRaw(key, topts) {
|
||||
return getClient().get(key, { type: topts?.type ?? "arrayBuffer" });
|
||||
},
|
||||
async setItem(key, value, topts) {
|
||||
await getClient().set(key, value, topts);
|
||||
},
|
||||
async setItemRaw(key, value, topts) {
|
||||
await getClient().set(key, value, topts);
|
||||
},
|
||||
removeItem(key) {
|
||||
return getClient().delete(key);
|
||||
},
|
||||
async getKeys(base, tops) {
|
||||
return (await getClient().list({ ...tops, prefix: base })).blobs.map(
|
||||
(item) => item.key
|
||||
);
|
||||
},
|
||||
async clear(base) {
|
||||
const client = getClient();
|
||||
return Promise.allSettled(
|
||||
(await client.list({ prefix: base })).blobs.map(
|
||||
(item) => client.delete(item.key)
|
||||
)
|
||||
).then(() => {
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
36
node_modules/unstorage/drivers/null.cjs
generated
vendored
Normal file
36
node_modules/unstorage/drivers/null.cjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const DRIVER_NAME = "null";
|
||||
module.exports = (0, _utils.defineDriver)(() => {
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
hasItem() {
|
||||
return false;
|
||||
},
|
||||
getItem() {
|
||||
return null;
|
||||
},
|
||||
getItemRaw() {
|
||||
return null;
|
||||
},
|
||||
getItems() {
|
||||
return [];
|
||||
},
|
||||
getMeta() {
|
||||
return null;
|
||||
},
|
||||
getKeys() {
|
||||
return [];
|
||||
},
|
||||
setItem() {},
|
||||
setItemRaw() {},
|
||||
setItems() {},
|
||||
removeItem() {},
|
||||
clear() {}
|
||||
};
|
||||
});
|
2
node_modules/unstorage/drivers/null.d.ts
generated
vendored
Normal file
2
node_modules/unstorage/drivers/null.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
declare const _default: (opts: void) => import("..").Driver<void, never>;
|
||||
export default _default;
|
35
node_modules/unstorage/drivers/null.mjs
generated
vendored
Normal file
35
node_modules/unstorage/drivers/null.mjs
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
const DRIVER_NAME = "null";
|
||||
export default defineDriver(() => {
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
hasItem() {
|
||||
return false;
|
||||
},
|
||||
getItem() {
|
||||
return null;
|
||||
},
|
||||
getItemRaw() {
|
||||
return null;
|
||||
},
|
||||
getItems() {
|
||||
return [];
|
||||
},
|
||||
getMeta() {
|
||||
return null;
|
||||
},
|
||||
getKeys() {
|
||||
return [];
|
||||
},
|
||||
setItem() {
|
||||
},
|
||||
setItemRaw() {
|
||||
},
|
||||
setItems() {
|
||||
},
|
||||
removeItem() {
|
||||
},
|
||||
clear() {
|
||||
}
|
||||
};
|
||||
});
|
67
node_modules/unstorage/drivers/overlay.cjs
generated
vendored
Normal file
67
node_modules/unstorage/drivers/overlay.cjs
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
const OVERLAY_REMOVED = "__OVERLAY_REMOVED__";
|
||||
const DRIVER_NAME = "overlay";
|
||||
module.exports = (0, _utils.defineDriver)(options => {
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options,
|
||||
async hasItem(key, opts) {
|
||||
for (const layer of options.layers) {
|
||||
if (await layer.hasItem(key, opts)) {
|
||||
if (layer === options.layers[0] && (await options.layers[0]?.getItem(key)) === OVERLAY_REMOVED) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
},
|
||||
async getItem(key) {
|
||||
for (const layer of options.layers) {
|
||||
const value = await layer.getItem(key);
|
||||
if (value === OVERLAY_REMOVED) {
|
||||
return null;
|
||||
}
|
||||
if (value !== null) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
// TODO: Support native meta
|
||||
// async getMeta (key) {},
|
||||
async setItem(key, value, opts) {
|
||||
await options.layers[0]?.setItem?.(key, value, opts);
|
||||
},
|
||||
async removeItem(key, opts) {
|
||||
await options.layers[0]?.setItem?.(key, OVERLAY_REMOVED, opts);
|
||||
},
|
||||
async getKeys(base, opts) {
|
||||
const allKeys = await Promise.all(options.layers.map(async layer => {
|
||||
const keys = await layer.getKeys(base, opts);
|
||||
return keys.map(key => (0, _utils.normalizeKey)(key));
|
||||
}));
|
||||
const uniqueKeys = [...new Set(allKeys.flat())];
|
||||
const existingKeys = await Promise.all(uniqueKeys.map(async key => {
|
||||
if ((await options.layers[0]?.getItem(key)) === OVERLAY_REMOVED) {
|
||||
return false;
|
||||
}
|
||||
return key;
|
||||
}));
|
||||
return existingKeys.filter(Boolean);
|
||||
},
|
||||
async dispose() {
|
||||
await Promise.all(options.layers.map(async layer => {
|
||||
if (layer.dispose) {
|
||||
await layer.dispose();
|
||||
}
|
||||
}));
|
||||
}
|
||||
};
|
||||
});
|
6
node_modules/unstorage/drivers/overlay.d.ts
generated
vendored
Normal file
6
node_modules/unstorage/drivers/overlay.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { Driver } from "..";
|
||||
export interface OverlayStorageOptions {
|
||||
layers: Driver[];
|
||||
}
|
||||
declare const _default: (opts: OverlayStorageOptions) => Driver<OverlayStorageOptions, never>;
|
||||
export default _default;
|
68
node_modules/unstorage/drivers/overlay.mjs
generated
vendored
Normal file
68
node_modules/unstorage/drivers/overlay.mjs
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
import { defineDriver } from "./utils/index.mjs";
|
||||
import { normalizeKey } from "./utils/index.mjs";
|
||||
const OVERLAY_REMOVED = "__OVERLAY_REMOVED__";
|
||||
const DRIVER_NAME = "overlay";
|
||||
export default defineDriver((options) => {
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options,
|
||||
async hasItem(key, opts) {
|
||||
for (const layer of options.layers) {
|
||||
if (await layer.hasItem(key, opts)) {
|
||||
if (layer === options.layers[0] && await options.layers[0]?.getItem(key) === OVERLAY_REMOVED) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
},
|
||||
async getItem(key) {
|
||||
for (const layer of options.layers) {
|
||||
const value = await layer.getItem(key);
|
||||
if (value === OVERLAY_REMOVED) {
|
||||
return null;
|
||||
}
|
||||
if (value !== null) {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
},
|
||||
// TODO: Support native meta
|
||||
// async getMeta (key) {},
|
||||
async setItem(key, value, opts) {
|
||||
await options.layers[0]?.setItem?.(key, value, opts);
|
||||
},
|
||||
async removeItem(key, opts) {
|
||||
await options.layers[0]?.setItem?.(key, OVERLAY_REMOVED, opts);
|
||||
},
|
||||
async getKeys(base, opts) {
|
||||
const allKeys = await Promise.all(
|
||||
options.layers.map(async (layer) => {
|
||||
const keys = await layer.getKeys(base, opts);
|
||||
return keys.map((key) => normalizeKey(key));
|
||||
})
|
||||
);
|
||||
const uniqueKeys = [...new Set(allKeys.flat())];
|
||||
const existingKeys = await Promise.all(
|
||||
uniqueKeys.map(async (key) => {
|
||||
if (await options.layers[0]?.getItem(key) === OVERLAY_REMOVED) {
|
||||
return false;
|
||||
}
|
||||
return key;
|
||||
})
|
||||
);
|
||||
return existingKeys.filter(Boolean);
|
||||
},
|
||||
async dispose() {
|
||||
await Promise.all(
|
||||
options.layers.map(async (layer) => {
|
||||
if (layer.dispose) {
|
||||
await layer.dispose();
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
};
|
||||
});
|
80
node_modules/unstorage/drivers/planetscale.cjs
generated
vendored
Normal file
80
node_modules/unstorage/drivers/planetscale.cjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _database = require("@planetscale/database");
|
||||
var _nodeFetchNative = require("node-fetch-native");
|
||||
const DRIVER_NAME = "planetscale";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
opts.table = opts.table || "storage";
|
||||
let _connection;
|
||||
const getConnection = () => {
|
||||
if (!_connection) {
|
||||
if (!opts.url) {
|
||||
throw (0, _utils.createRequiredError)(DRIVER_NAME, "url");
|
||||
}
|
||||
_connection = (0, _database.connect)({
|
||||
url: opts.url,
|
||||
fetch: _nodeFetchNative.fetch
|
||||
});
|
||||
if (opts.boostCache) {
|
||||
_connection.execute("SET @@boost_cached_queries = true;").catch(error => {
|
||||
console.error("[unstorage] [planetscale] Failed to enable cached queries:", error);
|
||||
});
|
||||
}
|
||||
}
|
||||
return _connection;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getConnection,
|
||||
hasItem: async key => {
|
||||
const res = await getConnection().execute(`SELECT EXISTS (SELECT 1 FROM ${opts.table} WHERE id = :key) as value;`, {
|
||||
key
|
||||
});
|
||||
return rows(res)[0]?.value == "1";
|
||||
},
|
||||
getItem: async key => {
|
||||
const res = await getConnection().execute(`SELECT value from ${opts.table} WHERE id=:key;`, {
|
||||
key
|
||||
});
|
||||
return rows(res)[0]?.value ?? null;
|
||||
},
|
||||
setItem: async (key, value) => {
|
||||
await getConnection().execute(`INSERT INTO ${opts.table} (id, value) VALUES (:key, :value) ON DUPLICATE KEY UPDATE value = :value;`, {
|
||||
key,
|
||||
value
|
||||
});
|
||||
},
|
||||
removeItem: async key => {
|
||||
await getConnection().execute(`DELETE FROM ${opts.table} WHERE id=:key;`, {
|
||||
key
|
||||
});
|
||||
},
|
||||
getMeta: async key => {
|
||||
const res = await getConnection().execute(`SELECT created_at, updated_at from ${opts.table} WHERE id=:key;`, {
|
||||
key
|
||||
});
|
||||
return {
|
||||
birthtime: rows(res)[0]?.created_at,
|
||||
mtime: rows(res)[0]?.updated_at
|
||||
};
|
||||
},
|
||||
getKeys: async (base = "") => {
|
||||
const res = await getConnection().execute(`SELECT id from ${opts.table} WHERE id LIKE :base;`, {
|
||||
base: `${base}%`
|
||||
});
|
||||
return rows(res).map(r => r.id);
|
||||
},
|
||||
clear: async () => {
|
||||
await getConnection().execute(`DELETE FROM ${opts.table};`);
|
||||
}
|
||||
};
|
||||
});
|
||||
function rows(res) {
|
||||
return res.rows || [];
|
||||
}
|
8
node_modules/unstorage/drivers/planetscale.d.ts
generated
vendored
Normal file
8
node_modules/unstorage/drivers/planetscale.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import type { Connection } from "@planetscale/database";
|
||||
export interface PlanetscaleDriverOptions {
|
||||
url?: string;
|
||||
table?: string;
|
||||
boostCache?: boolean;
|
||||
}
|
||||
declare const _default: (opts: PlanetscaleDriverOptions | undefined) => import("..").Driver<PlanetscaleDriverOptions | undefined, Connection>;
|
||||
export default _default;
|
82
node_modules/unstorage/drivers/planetscale.mjs
generated
vendored
Normal file
82
node_modules/unstorage/drivers/planetscale.mjs
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
import { createRequiredError, defineDriver } from "./utils/index.mjs";
|
||||
import { connect } from "@planetscale/database";
|
||||
import { fetch } from "node-fetch-native";
|
||||
const DRIVER_NAME = "planetscale";
|
||||
export default defineDriver((opts = {}) => {
|
||||
opts.table = opts.table || "storage";
|
||||
let _connection;
|
||||
const getConnection = () => {
|
||||
if (!_connection) {
|
||||
if (!opts.url) {
|
||||
throw createRequiredError(DRIVER_NAME, "url");
|
||||
}
|
||||
_connection = connect({
|
||||
url: opts.url,
|
||||
fetch
|
||||
});
|
||||
if (opts.boostCache) {
|
||||
_connection.execute("SET @@boost_cached_queries = true;").catch((error) => {
|
||||
console.error(
|
||||
"[unstorage] [planetscale] Failed to enable cached queries:",
|
||||
error
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
return _connection;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getConnection,
|
||||
hasItem: async (key) => {
|
||||
const res = await getConnection().execute(
|
||||
`SELECT EXISTS (SELECT 1 FROM ${opts.table} WHERE id = :key) as value;`,
|
||||
{ key }
|
||||
);
|
||||
return rows(res)[0]?.value == "1";
|
||||
},
|
||||
getItem: async (key) => {
|
||||
const res = await getConnection().execute(
|
||||
`SELECT value from ${opts.table} WHERE id=:key;`,
|
||||
{ key }
|
||||
);
|
||||
return rows(res)[0]?.value ?? null;
|
||||
},
|
||||
setItem: async (key, value) => {
|
||||
await getConnection().execute(
|
||||
`INSERT INTO ${opts.table} (id, value) VALUES (:key, :value) ON DUPLICATE KEY UPDATE value = :value;`,
|
||||
{ key, value }
|
||||
);
|
||||
},
|
||||
removeItem: async (key) => {
|
||||
await getConnection().execute(
|
||||
`DELETE FROM ${opts.table} WHERE id=:key;`,
|
||||
{ key }
|
||||
);
|
||||
},
|
||||
getMeta: async (key) => {
|
||||
const res = await getConnection().execute(
|
||||
`SELECT created_at, updated_at from ${opts.table} WHERE id=:key;`,
|
||||
{ key }
|
||||
);
|
||||
return {
|
||||
birthtime: rows(res)[0]?.created_at,
|
||||
mtime: rows(res)[0]?.updated_at
|
||||
};
|
||||
},
|
||||
getKeys: async (base = "") => {
|
||||
const res = await getConnection().execute(
|
||||
`SELECT id from ${opts.table} WHERE id LIKE :base;`,
|
||||
{ base: `${base}%` }
|
||||
);
|
||||
return rows(res).map((r) => r.id);
|
||||
},
|
||||
clear: async () => {
|
||||
await getConnection().execute(`DELETE FROM ${opts.table};`);
|
||||
}
|
||||
};
|
||||
});
|
||||
function rows(res) {
|
||||
return res.rows || [];
|
||||
}
|
94
node_modules/unstorage/drivers/redis.cjs
generated
vendored
Normal file
94
node_modules/unstorage/drivers/redis.cjs
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _ioredis = _interopRequireDefault(require("ioredis"));
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
const DRIVER_NAME = "redis";
|
||||
module.exports = (0, _utils.defineDriver)(opts => {
|
||||
let redisClient;
|
||||
const getRedisClient = () => {
|
||||
if (redisClient) {
|
||||
return redisClient;
|
||||
}
|
||||
if (opts.cluster) {
|
||||
redisClient = new _ioredis.default.Cluster(opts.cluster, opts.clusterOptions);
|
||||
} else if (opts.url) {
|
||||
redisClient = new _ioredis.default(opts.url, opts);
|
||||
} else {
|
||||
redisClient = new _ioredis.default(opts);
|
||||
}
|
||||
return redisClient;
|
||||
};
|
||||
const base = (opts.base || "").replace(/:$/, "");
|
||||
const p = (...keys) => (0, _utils.joinKeys)(base, ...keys);
|
||||
const d = key => base ? key.replace(`${base}:`, "") : key;
|
||||
if (opts.preConnect) {
|
||||
try {
|
||||
getRedisClient();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
}
|
||||
const scan = async pattern => {
|
||||
const client = getRedisClient();
|
||||
const keys = [];
|
||||
let cursor = "0";
|
||||
do {
|
||||
const [nextCursor, scanKeys] = opts.scanCount ? await client.scan(cursor, "MATCH", pattern, "COUNT", opts.scanCount) : await client.scan(cursor, "MATCH", pattern);
|
||||
cursor = nextCursor;
|
||||
keys.push(...scanKeys);
|
||||
} while (cursor !== "0");
|
||||
return keys;
|
||||
};
|
||||
return {
|
||||
name: DRIVER_NAME,
|
||||
options: opts,
|
||||
getInstance: getRedisClient,
|
||||
async hasItem(key) {
|
||||
return Boolean(await getRedisClient().exists(p(key)));
|
||||
},
|
||||
async getItem(key) {
|
||||
const value = await getRedisClient().get(p(key));
|
||||
return value ?? null;
|
||||
},
|
||||
async getItems(items) {
|
||||
const keys = items.map(item => p(item.key));
|
||||
const data = await getRedisClient().mget(...keys);
|
||||
return keys.map((key, index) => {
|
||||
return {
|
||||
key: d(key),
|
||||
value: data[index] ?? null
|
||||
};
|
||||
});
|
||||
},
|
||||
async setItem(key, value, tOptions) {
|
||||
const ttl = tOptions?.ttl ?? opts.ttl;
|
||||
if (ttl) {
|
||||
await getRedisClient().set(p(key), value, "EX", ttl);
|
||||
} else {
|
||||
await getRedisClient().set(p(key), value);
|
||||
}
|
||||
},
|
||||
async removeItem(key) {
|
||||
await getRedisClient().unlink(p(key));
|
||||
},
|
||||
async getKeys(base2) {
|
||||
const keys = await scan(p(base2, "*"));
|
||||
return keys.map(key => d(key));
|
||||
},
|
||||
async clear(base2) {
|
||||
const keys = await scan(p(base2, "*"));
|
||||
if (keys.length === 0) {
|
||||
return;
|
||||
}
|
||||
await getRedisClient().unlink(keys);
|
||||
},
|
||||
dispose() {
|
||||
return getRedisClient().disconnect();
|
||||
}
|
||||
};
|
||||
});
|
37
node_modules/unstorage/drivers/redis.d.ts
generated
vendored
Normal file
37
node_modules/unstorage/drivers/redis.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import Redis, { Cluster, type ClusterNode, type ClusterOptions, type RedisOptions as _RedisOptions } from "ioredis";
|
||||
export interface RedisOptions extends _RedisOptions {
|
||||
/**
|
||||
* Optional prefix to use for all keys. Can be used for namespacing.
|
||||
*/
|
||||
base?: string;
|
||||
/**
|
||||
* Url to use for connecting to redis. Takes precedence over `host` option. Has the format `redis://<REDIS_USER>:<REDIS_PASSWORD>@<REDIS_HOST>:<REDIS_PORT>`
|
||||
*/
|
||||
url?: string;
|
||||
/**
|
||||
* List of redis nodes to use for cluster mode. Takes precedence over `url` and `host` options.
|
||||
*/
|
||||
cluster?: ClusterNode[];
|
||||
/**
|
||||
* Options to use for cluster mode.
|
||||
*/
|
||||
clusterOptions?: ClusterOptions;
|
||||
/**
|
||||
* Default TTL for all items in seconds.
|
||||
*/
|
||||
ttl?: number;
|
||||
/**
|
||||
* How many keys to scan at once.
|
||||
*
|
||||
* [redis documentation](https://redis.io/docs/latest/commands/scan/#the-count-option)
|
||||
*/
|
||||
scanCount?: number;
|
||||
/**
|
||||
* Whether to initialize the redis instance immediately.
|
||||
* Otherwise, it will be initialized on the first read/write call.
|
||||
* @default false
|
||||
*/
|
||||
preConnect?: boolean;
|
||||
}
|
||||
declare const _default: (opts: RedisOptions) => import("..").Driver<RedisOptions, Redis | Cluster>;
|
||||
export default _default;
|
87
node_modules/unstorage/drivers/redis.mjs
generated
vendored
Normal file
87
node_modules/unstorage/drivers/redis.mjs
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
import { defineDriver, joinKeys } from "./utils/index.mjs";
|
||||
import Redis from "ioredis";
|
||||
const DRIVER_NAME = "redis";
|
||||
export default defineDriver((opts) => {
  // Lazily-created ioredis client; a single instance is reused for the
  // lifetime of the driver.
  let redisClient;
  const getRedisClient = () => {
    if (redisClient) {
      return redisClient;
    }
    // Connection priority: explicit cluster config > connection URL > plain options.
    if (opts.cluster) {
      redisClient = new Redis.Cluster(opts.cluster, opts.clusterOptions);
    } else if (opts.url) {
      redisClient = new Redis(opts.url, opts);
    } else {
      redisClient = new Redis(opts);
    }
    return redisClient;
  };
  // Key-prefix base with any trailing ":" stripped (joinKeys re-inserts separators).
  const base = (opts.base || "").replace(/:$/, "");
  // p(): prefix a storage key with the base; d(): strip the base prefix again
  // before returning keys to callers. NOTE(review): d() uses String.replace of
  // the first occurrence of `${base}:` anywhere in the key, not only a leading
  // prefix — fine for keys produced by p(), but worth confirming upstream.
  const p = (...keys) => joinKeys(base, ...keys);
  const d = (key) => base ? key.replace(`${base}:`, "") : key;
  // Optional eager connection: failures are logged but non-fatal, so the
  // driver can still be constructed and retried on first use.
  if (opts.preConnect) {
    try {
      getRedisClient();
    } catch (error) {
      console.error(error);
    }
  }
  // Walk SCAN cursors until Redis reports cursor "0", collecting every key
  // matching `pattern`. opts.scanCount is only a per-iteration size hint.
  const scan = async (pattern) => {
    const client = getRedisClient();
    const keys = [];
    let cursor = "0";
    do {
      const [nextCursor, scanKeys] = opts.scanCount ? await client.scan(cursor, "MATCH", pattern, "COUNT", opts.scanCount) : await client.scan(cursor, "MATCH", pattern);
      cursor = nextCursor;
      keys.push(...scanKeys);
    } while (cursor !== "0");
    return keys;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getRedisClient,
    // EXISTS returns an integer count; coerce to boolean.
    async hasItem(key) {
      return Boolean(await getRedisClient().exists(p(key)));
    },
    async getItem(key) {
      const value = await getRedisClient().get(p(key));
      return value ?? null;
    },
    // Batched read via MGET; results are mapped back to de-prefixed keys in order.
    async getItems(items) {
      const keys = items.map((item) => p(item.key));
      const data = await getRedisClient().mget(...keys);
      return keys.map((key, index) => {
        return {
          key: d(key),
          value: data[index] ?? null
        };
      });
    },
    // Per-call ttl (seconds, via SET ... EX) overrides the driver-level opts.ttl.
    async setItem(key, value, tOptions) {
      const ttl = tOptions?.ttl ?? opts.ttl;
      if (ttl) {
        await getRedisClient().set(p(key), value, "EX", ttl);
      } else {
        await getRedisClient().set(p(key), value);
      }
    },
    // UNLINK reclaims memory asynchronously (non-blocking), unlike DEL.
    async removeItem(key) {
      await getRedisClient().unlink(p(key));
    },
    async getKeys(base2) {
      const keys = await scan(p(base2, "*"));
      return keys.map((key) => d(key));
    },
    // Scan all matching keys, then bulk-unlink them in one call.
    async clear(base2) {
      const keys = await scan(p(base2, "*"));
      if (keys.length === 0) {
        return;
      }
      await getRedisClient().unlink(keys);
    },
    dispose() {
      return getRedisClient().disconnect();
    }
  };
});
|
169
node_modules/unstorage/drivers/s3.cjs
generated
vendored
Normal file
169
node_modules/unstorage/drivers/s3.cjs
generated
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _aws4fetch = require("aws4fetch");
|
||||
const DRIVER_NAME = "s3";
|
||||
module.exports = (0, _utils.defineDriver)(options => {
  // Lazily-constructed aws4fetch signing client; required options are
  // validated on first use rather than at driver creation.
  let _awsClient;
  const getAwsClient = () => {
    if (!_awsClient) {
      if (!options.accessKeyId) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "accessKeyId");
      }
      if (!options.secretAccessKey) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "secretAccessKey");
      }
      if (!options.endpoint) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "endpoint");
      }
      if (!options.region) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "region");
      }
      _awsClient = new _aws4fetch.AwsClient({
        service: "s3",
        accessKeyId: options.accessKeyId,
        secretAccessKey: options.secretAccessKey,
        region: options.region
      });
    }
    return _awsClient;
  };
  // "<endpoint>/<bucket>" with any trailing slash on the endpoint removed.
  const baseURL = `${options.endpoint.replace(/\/$/, "")}/${options.bucket || ""}`;
  const url = (key = "") => `${baseURL}/${(0, _utils.normalizeKey)(key, "/")}`;
  // Signed fetch wrapper: returns null on 404 ("not found"), throws a
  // driver error (with status and response text) on any other non-2xx.
  const awsFetch = async (url2, opts) => {
    const request = await getAwsClient().sign(url2, opts);
    const res = await fetch(request);
    if (!res.ok) {
      if (res.status === 404) {
        return null;
      }
      throw (0, _utils.createError)(DRIVER_NAME, `[${request.method}] ${url2}: ${res.status} ${res.statusText} ${await res.text()}`);
    }
    return res;
  };
  // HEAD the object and collect all "x-amz-meta-*" response headers
  // (suffix only) as the metadata object. null when the object is missing.
  const headObject = async key => {
    const res = await awsFetch(url(key), {
      method: "HEAD"
    });
    if (!res) {
      return null;
    }
    const metaHeaders = {};
    for (const [key2, value] of res.headers.entries()) {
      const match = /x-amz-meta-(.*)/.exec(key2);
      if (match?.[1]) {
        metaHeaders[match[1]] = value;
      }
    }
    return metaHeaders;
  };
  // NOTE(review): the request always goes to baseURL — `prefix` is only used
  // in the debug log below, so the whole bucket is listed regardless of the
  // requested base, and no pagination (continuation tokens) is handled.
  // The console.log also looks like leftover debug output. Confirm upstream.
  const listObjects = async prefix => {
    const res = await awsFetch(baseURL).then(r => r?.text());
    if (!res) {
      console.log("no list", prefix ? `${baseURL}?prefix=${prefix}` : baseURL);
      return null;
    }
    return parseList(res);
  };
  const getObject = key => {
    return awsFetch(url(key));
  };
  const putObject = async (key, value) => {
    return awsFetch(url(key), {
      method: "PUT",
      body: value
    });
  };
  // DELETE; awsFetch maps 404 to null, hence the explicit status check here.
  const deleteObject = async key => {
    return awsFetch(url(key), {
      method: "DELETE"
    }).then(r => {
      if (r?.status !== 204 && r?.status !== 200) {
        throw (0, _utils.createError)(DRIVER_NAME, `Failed to delete ${key}`);
      }
    });
  };
  // Bulk delete via the S3 DeleteObjects ("?delete") API unless the provider
  // lacks it (options.bulkDelete === false), in which case objects are
  // deleted individually in parallel.
  const deleteObjects = async base => {
    const keys = await listObjects(base);
    if (!keys?.length) {
      return null;
    }
    if (options.bulkDelete === false) {
      await Promise.all(keys.map(key => deleteObject(key)));
    } else {
      const body = deleteKeysReq(keys);
      await awsFetch(`${baseURL}?delete`, {
        method: "POST",
        headers: {
          "x-amz-checksum-sha256": await sha256Base64(body)
        },
        body
      });
    }
  };
  return {
    name: DRIVER_NAME,
    options,
    getItem(key) {
      return getObject(key).then(res => res ? res.text() : null);
    },
    getItemRaw(key) {
      return getObject(key).then(res => res ? res.arrayBuffer() : null);
    },
    async setItem(key, value) {
      await putObject(key, value);
    },
    async setItemRaw(key, value) {
      await putObject(key, value);
    },
    getMeta(key) {
      return headObject(key);
    },
    hasItem(key) {
      // Existence is derived from a successful HEAD (any metadata object).
      return headObject(key).then(meta => !!meta);
    },
    getKeys(base) {
      return listObjects(base).then(keys => keys || []);
    },
    async removeItem(key) {
      await deleteObject(key);
    },
    async clear(base) {
      await deleteObjects(base);
    }
  };
});
|
||||
// Build the XML request body for a bulk S3 DeleteObjects ("?delete") call.
// Keys must be XML-escaped; "&" is escaped FIRST so it does not
// double-escape the entities introduced for "<" and ">".
// (The previous identity replacements — e.g. /&/g -> "&" — were no-ops and
// produced invalid XML for keys containing &, < or >.)
function deleteKeysReq(keys) {
  return `<Delete>${keys.map(key => {
    key = key.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
    return /* xml */`<Object><Key>${key}</Key></Object>`;
  }).join("")}</Delete>`;
}
|
||||
async function sha256Base64(str) {
|
||||
const buffer = new TextEncoder().encode(str);
|
||||
const hash = await crypto.subtle.digest("SHA-256", buffer);
|
||||
const bytes = new Uint8Array(hash);
|
||||
const binaryString = String.fromCharCode(...bytes);
|
||||
return btoa(binaryString);
|
||||
}
|
||||
/**
 * Extract object keys from an S3 ListBucket XML response using
 * lightweight regex parsing (no XML parser dependency).
 * @throws {Error} when the payload is not the expected XML shape.
 */
function parseList(xml) {
  if (!xml.startsWith("<?xml")) {
    throw new Error("Invalid XML");
  }
  const inner = /<ListBucketResult[^>]*>([\s\S]*)<\/ListBucketResult>/.exec(xml)?.[1];
  if (!inner) {
    throw new Error("Missing <ListBucketResult>");
  }
  const entries = inner.match(/<Contents[^>]*>([\s\S]*?)<\/Contents>/g) || [];
  const keys = [];
  for (const entry of entries) {
    const keyMatch = /<Key>([\s\S]+?)<\/Key>/.exec(entry);
    if (keyMatch) {
      keys.push(keyMatch[1]);
    }
  }
  return keys;
}
|
34
node_modules/unstorage/drivers/s3.d.ts
generated
vendored
Normal file
34
node_modules/unstorage/drivers/s3.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
export interface S3DriverOptions {
  /**
   * Access Key ID
   */
  accessKeyId: string;
  /**
   * Secret Access Key
   */
  secretAccessKey: string;
  /**
   * The endpoint URL of the S3 service.
   *
   * - For AWS S3: "https://s3.[region].amazonaws.com/"
   * - For cloudflare R2: "https://[uid].r2.cloudflarestorage.com/"
   */
  endpoint: string;
  /**
   * The region of the S3 bucket.
   *
   * - For AWS S3, this is the region of the bucket.
   * - For cloudflare, this can be set to `auto`.
   */
  region: string;
  /**
   * The name of the bucket.
   */
  bucket: string;
  /**
   * Enabled by default to speedup `clear()` operation. Set to `false` if provider is not implementing [DeleteObjects](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html).
   */
  bulkDelete?: boolean;
}
declare const _default: (opts: S3DriverOptions) => import("..").Driver<S3DriverOptions, never>;
export default _default;
|
174
node_modules/unstorage/drivers/s3.mjs
generated
vendored
Normal file
174
node_modules/unstorage/drivers/s3.mjs
generated
vendored
Normal file
@@ -0,0 +1,174 @@
|
||||
import {
|
||||
defineDriver,
|
||||
createRequiredError,
|
||||
normalizeKey,
|
||||
createError
|
||||
} from "./utils/index.mjs";
|
||||
import { AwsClient } from "aws4fetch";
|
||||
const DRIVER_NAME = "s3";
|
||||
export default defineDriver((options) => {
  // Lazily-constructed aws4fetch signing client; required options are
  // validated on first use rather than at driver creation.
  let _awsClient;
  const getAwsClient = () => {
    if (!_awsClient) {
      if (!options.accessKeyId) {
        throw createRequiredError(DRIVER_NAME, "accessKeyId");
      }
      if (!options.secretAccessKey) {
        throw createRequiredError(DRIVER_NAME, "secretAccessKey");
      }
      if (!options.endpoint) {
        throw createRequiredError(DRIVER_NAME, "endpoint");
      }
      if (!options.region) {
        throw createRequiredError(DRIVER_NAME, "region");
      }
      _awsClient = new AwsClient({
        service: "s3",
        accessKeyId: options.accessKeyId,
        secretAccessKey: options.secretAccessKey,
        region: options.region
      });
    }
    return _awsClient;
  };
  // "<endpoint>/<bucket>" with any trailing slash on the endpoint removed.
  const baseURL = `${options.endpoint.replace(/\/$/, "")}/${options.bucket || ""}`;
  const url = (key = "") => `${baseURL}/${normalizeKey(key, "/")}`;
  // Signed fetch wrapper: returns null on 404 ("not found"), throws a
  // driver error (with status and response text) on any other non-2xx.
  const awsFetch = async (url2, opts) => {
    const request = await getAwsClient().sign(url2, opts);
    const res = await fetch(request);
    if (!res.ok) {
      if (res.status === 404) {
        return null;
      }
      throw createError(
        DRIVER_NAME,
        `[${request.method}] ${url2}: ${res.status} ${res.statusText} ${await res.text()}`
      );
    }
    return res;
  };
  // HEAD the object and collect all "x-amz-meta-*" response headers
  // (suffix only) as the metadata object. null when the object is missing.
  const headObject = async (key) => {
    const res = await awsFetch(url(key), { method: "HEAD" });
    if (!res) {
      return null;
    }
    const metaHeaders = {};
    for (const [key2, value] of res.headers.entries()) {
      const match = /x-amz-meta-(.*)/.exec(key2);
      if (match?.[1]) {
        metaHeaders[match[1]] = value;
      }
    }
    return metaHeaders;
  };
  // NOTE(review): the request always goes to baseURL — `prefix` is only used
  // in the debug log below, so the whole bucket is listed regardless of the
  // requested base, and no pagination (continuation tokens) is handled.
  // The console.log also looks like leftover debug output. Confirm upstream.
  const listObjects = async (prefix) => {
    const res = await awsFetch(baseURL).then((r) => r?.text());
    if (!res) {
      console.log("no list", prefix ? `${baseURL}?prefix=${prefix}` : baseURL);
      return null;
    }
    return parseList(res);
  };
  const getObject = (key) => {
    return awsFetch(url(key));
  };
  const putObject = async (key, value) => {
    return awsFetch(url(key), {
      method: "PUT",
      body: value
    });
  };
  // DELETE; awsFetch maps 404 to null, hence the explicit status check here.
  const deleteObject = async (key) => {
    return awsFetch(url(key), { method: "DELETE" }).then((r) => {
      if (r?.status !== 204 && r?.status !== 200) {
        throw createError(DRIVER_NAME, `Failed to delete ${key}`);
      }
    });
  };
  // Bulk delete via the S3 DeleteObjects ("?delete") API unless the provider
  // lacks it (options.bulkDelete === false), in which case objects are
  // deleted individually in parallel.
  const deleteObjects = async (base) => {
    const keys = await listObjects(base);
    if (!keys?.length) {
      return null;
    }
    if (options.bulkDelete === false) {
      await Promise.all(keys.map((key) => deleteObject(key)));
    } else {
      const body = deleteKeysReq(keys);
      await awsFetch(`${baseURL}?delete`, {
        method: "POST",
        headers: {
          "x-amz-checksum-sha256": await sha256Base64(body)
        },
        body
      });
    }
  };
  return {
    name: DRIVER_NAME,
    options,
    getItem(key) {
      return getObject(key).then((res) => res ? res.text() : null);
    },
    getItemRaw(key) {
      return getObject(key).then((res) => res ? res.arrayBuffer() : null);
    },
    async setItem(key, value) {
      await putObject(key, value);
    },
    async setItemRaw(key, value) {
      await putObject(key, value);
    },
    getMeta(key) {
      return headObject(key);
    },
    hasItem(key) {
      // Existence is derived from a successful HEAD (any metadata object).
      return headObject(key).then((meta) => !!meta);
    },
    getKeys(base) {
      return listObjects(base).then((keys) => keys || []);
    },
    async removeItem(key) {
      await deleteObject(key);
    },
    async clear(base) {
      await deleteObjects(base);
    }
  };
});
|
||||
// Build the XML request body for a bulk S3 DeleteObjects ("?delete") call.
// Keys must be XML-escaped; "&" is escaped FIRST so it does not
// double-escape the entities introduced for "<" and ">".
// (The previous identity replacements — e.g. /&/g -> "&" — were no-ops and
// produced invalid XML for keys containing &, < or >.)
function deleteKeysReq(keys) {
  return `<Delete>${keys.map((key) => {
    key = key.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
    return (
      /* xml */
      `<Object><Key>${key}</Key></Object>`
    );
  }).join("")}</Delete>`;
}
|
||||
async function sha256Base64(str) {
|
||||
const buffer = new TextEncoder().encode(str);
|
||||
const hash = await crypto.subtle.digest("SHA-256", buffer);
|
||||
const bytes = new Uint8Array(hash);
|
||||
const binaryString = String.fromCharCode(...bytes);
|
||||
return btoa(binaryString);
|
||||
}
|
||||
/**
 * Extract object keys from an S3 ListBucket XML response using
 * lightweight regex parsing (no XML parser dependency).
 * @throws {Error} when the payload is not the expected XML shape.
 */
function parseList(xml) {
  if (!xml.startsWith("<?xml")) {
    throw new Error("Invalid XML");
  }
  const inner = /<ListBucketResult[^>]*>([\s\S]*)<\/ListBucketResult>/.exec(xml)?.[1];
  if (!inner) {
    throw new Error("Missing <ListBucketResult>");
  }
  const entries = inner.match(/<Contents[^>]*>([\s\S]*?)<\/Contents>/g) || [];
  const keys = [];
  for (const entry of entries) {
    const keyMatch = /<Key>([\s\S]+?)<\/Key>/.exec(entry);
    if (keyMatch) {
      keys.push(keyMatch[1]);
    }
  }
  return keys;
}
|
19
node_modules/unstorage/drivers/session-storage.cjs
generated
vendored
Normal file
19
node_modules/unstorage/drivers/session-storage.cjs
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
|
||||
var _utils = require("./utils/index.cjs");
|
||||
var _localstorage = _interopRequireDefault(require("./localstorage.cjs"));
|
||||
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
|
||||
const DRIVER_NAME = "session-storage";
|
||||
module.exports = (0, _utils.defineDriver)((opts = {}) => {
|
||||
return {
|
||||
...(0, _localstorage.default)({
|
||||
windowKey: "sessionStorage",
|
||||
...opts
|
||||
}),
|
||||
name: DRIVER_NAME
|
||||
};
|
||||
});
|
5
node_modules/unstorage/drivers/session-storage.d.ts
generated
vendored
Normal file
5
node_modules/unstorage/drivers/session-storage.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
import { type LocalStorageOptions } from "./localstorage";
/**
 * Options for the session-storage driver.
 * Same shape as the localstorage driver options (this driver delegates to it).
 */
export interface SessionStorageOptions extends LocalStorageOptions {
}
declare const _default: (opts: LocalStorageOptions | undefined) => import("..").Driver<LocalStorageOptions | undefined, Storage>;
export default _default;
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user