full site update
111
node_modules/unstorage/drivers/azure-app-configuration.cjs
generated
vendored
Normal file
@@ -0,0 +1,111 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _appConfiguration = require("@azure/app-configuration");
var _identity = require("@azure/identity");
const DRIVER_NAME = "azure-app-configuration";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  const labelFilter = opts.label || "\0";
  const keyFilter = opts.prefix ? `${opts.prefix}:*` : "*";
  const p = key => opts.prefix ? `${opts.prefix}:${key}` : key;
  const d = key => opts.prefix ? key.replace(opts.prefix, "") : key;
  let client;
  const getClient = () => {
    if (client) {
      return client;
    }
    if (!opts.endpoint && !opts.appConfigName && !opts.connectionString) {
      throw (0, _utils.createRequiredError)(DRIVER_NAME, ["endpoint", "appConfigName", "connectionString"]);
    }
    const appConfigEndpoint = opts.endpoint || `https://${opts.appConfigName}.azconfig.io`;
    if (opts.connectionString) {
      client = new _appConfiguration.AppConfigurationClient(opts.connectionString);
    } else {
      const credential = new _identity.DefaultAzureCredential();
      client = new _appConfiguration.AppConfigurationClient(appConfigEndpoint, credential);
    }
    return client;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getClient,
    async hasItem(key) {
      try {
        await getClient().getConfigurationSetting({
          key: p(key),
          label: opts.label
        });
        return true;
      } catch {
        return false;
      }
    },
    async getItem(key) {
      try {
        const setting = await getClient().getConfigurationSetting({
          key: p(key),
          label: opts.label
        });
        return setting.value;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      await getClient().setConfigurationSetting({
        key: p(key),
        value,
        label: opts.label
      });
      return;
    },
    async removeItem(key) {
      await getClient().deleteConfigurationSetting({
        key: p(key),
        label: opts.label
      });
      return;
    },
    async getKeys() {
      const settings = getClient().listConfigurationSettings({
        keyFilter,
        labelFilter,
        fields: ["key", "value", "label"]
      });
      const keys = [];
      for await (const setting of settings) {
        keys.push(d(setting.key));
      }
      return keys;
    },
    async getMeta(key) {
      const setting = await getClient().getConfigurationSetting({
        key: p(key),
        label: opts.label
      });
      return {
        mtime: setting.lastModified,
        etag: setting.etag,
        tags: setting.tags
      };
    },
    async clear() {
      const settings = getClient().listConfigurationSettings({
        keyFilter,
        labelFilter,
        fields: ["key", "value", "label"]
      });
      for await (const setting of settings) {
        await getClient().deleteConfigurationSetting({
          key: setting.key,
          label: setting.label
        });
      }
    }
  };
});
30
node_modules/unstorage/drivers/azure-app-configuration.d.ts
generated
vendored
Normal file
@@ -0,0 +1,30 @@
import { AppConfigurationClient } from "@azure/app-configuration";
export interface AzureAppConfigurationOptions {
  /**
   * Optional prefix for keys. This can be used to isolate keys from different applications in the same Azure App Configuration instance. E.g. "app01" results in keys like "app01:foo" and "app01:bar".
   * @default null
   */
  prefix?: string;
  /**
   * Optional label for keys. If not provided, all keys will be created and listed without labels. This can be used to isolate keys from different environments in the same Azure App Configuration instance. E.g. "dev" results in keys like "foo" and "bar" with the label "dev".
   * @default '\0'
   */
  label?: string;
  /**
   * Optional endpoint to use when connecting to Azure App Configuration. If not provided, the appConfigName option must be provided. If both are provided, the endpoint option takes precedence.
   * @default null
   */
  endpoint?: string;
  /**
   * Optional name of the Azure App Configuration instance to connect to. If not provided, the endpoint option must be provided. If both are provided, the endpoint option takes precedence.
   * @default null
   */
  appConfigName?: string;
  /**
   * Optional connection string to use when connecting to Azure App Configuration. If not provided, the endpoint option must be provided. If both are provided, the endpoint option takes precedence.
   * @default null
   */
  connectionString?: string;
}
declare const _default: (opts: AzureAppConfigurationOptions | undefined) => import("..").Driver<AzureAppConfigurationOptions | undefined, AppConfigurationClient>;
export default _default;
109
node_modules/unstorage/drivers/azure-app-configuration.mjs
generated
vendored
Normal file
@@ -0,0 +1,109 @@
import { defineDriver, createRequiredError } from "./utils/index.mjs";
import { AppConfigurationClient } from "@azure/app-configuration";
import { DefaultAzureCredential } from "@azure/identity";
const DRIVER_NAME = "azure-app-configuration";
export default defineDriver((opts = {}) => {
  const labelFilter = opts.label || "\0";
  const keyFilter = opts.prefix ? `${opts.prefix}:*` : "*";
  const p = (key) => opts.prefix ? `${opts.prefix}:${key}` : key;
  const d = (key) => opts.prefix ? key.replace(opts.prefix, "") : key;
  let client;
  const getClient = () => {
    if (client) {
      return client;
    }
    if (!opts.endpoint && !opts.appConfigName && !opts.connectionString) {
      throw createRequiredError(DRIVER_NAME, [
        "endpoint",
        "appConfigName",
        "connectionString"
      ]);
    }
    const appConfigEndpoint = opts.endpoint || `https://${opts.appConfigName}.azconfig.io`;
    if (opts.connectionString) {
      client = new AppConfigurationClient(opts.connectionString);
    } else {
      const credential = new DefaultAzureCredential();
      client = new AppConfigurationClient(appConfigEndpoint, credential);
    }
    return client;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getClient,
    async hasItem(key) {
      try {
        await getClient().getConfigurationSetting({
          key: p(key),
          label: opts.label
        });
        return true;
      } catch {
        return false;
      }
    },
    async getItem(key) {
      try {
        const setting = await getClient().getConfigurationSetting({
          key: p(key),
          label: opts.label
        });
        return setting.value;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      await getClient().setConfigurationSetting({
        key: p(key),
        value,
        label: opts.label
      });
      return;
    },
    async removeItem(key) {
      await getClient().deleteConfigurationSetting({
        key: p(key),
        label: opts.label
      });
      return;
    },
    async getKeys() {
      const settings = getClient().listConfigurationSettings({
        keyFilter,
        labelFilter,
        fields: ["key", "value", "label"]
      });
      const keys = [];
      for await (const setting of settings) {
        keys.push(d(setting.key));
      }
      return keys;
    },
    async getMeta(key) {
      const setting = await getClient().getConfigurationSetting({
        key: p(key),
        label: opts.label
      });
      return {
        mtime: setting.lastModified,
        etag: setting.etag,
        tags: setting.tags
      };
    },
    async clear() {
      const settings = getClient().listConfigurationSettings({
        keyFilter,
        labelFilter,
        fields: ["key", "value", "label"]
      });
      for await (const setting of settings) {
        await getClient().deleteConfigurationSetting({
          key: setting.key,
          label: setting.label
        });
      }
    }
  };
});
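
A minimal usage sketch for this driver, assuming unstorage's documented createStorage API; the instance name, label and prefix below are placeholder values:

// Not part of the commit; illustrates the options documented in the .d.ts above.
import { createStorage } from "unstorage";
import azureAppConfiguration from "unstorage/drivers/azure-app-configuration";

const storage = createStorage({
  driver: azureAppConfiguration({
    appConfigName: "my-app-config", // placeholder; resolves to https://my-app-config.azconfig.io
    label: "dev", // settings are isolated under this label
    prefix: "app01" // keys are stored as "app01:<key>"
  })
});

await storage.setItem("foo", "bar"); // stored as key "app01:foo" with label "dev"
const value = await storage.getItem("foo"); // "bar"

With no connectionString, the driver falls back to DefaultAzureCredential, which picks up ambient credentials (environment variables, managed identity, or an Azure CLI login).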
103
node_modules/unstorage/drivers/azure-cosmos.cjs
generated
vendored
Normal file
@@ -0,0 +1,103 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _cosmos = require("@azure/cosmos");
var _identity = require("@azure/identity");
const DRIVER_NAME = "azure-cosmos";
module.exports = (0, _utils.defineDriver)(opts => {
  let client;
  const getCosmosClient = async () => {
    if (client) {
      return client;
    }
    if (!opts.endpoint) {
      throw (0, _utils.createRequiredError)(DRIVER_NAME, "endpoint");
    }
    if (opts.accountKey) {
      const cosmosClient = new _cosmos.CosmosClient({
        endpoint: opts.endpoint,
        key: opts.accountKey
      });
      const {
        database
      } = await cosmosClient.databases.createIfNotExists({
        id: opts.databaseName || "unstorage"
      });
      const {
        container
      } = await database.containers.createIfNotExists({
        id: opts.containerName || "unstorage"
      });
      client = container;
    } else {
      const credential = new _identity.DefaultAzureCredential();
      const cosmosClient = new _cosmos.CosmosClient({
        endpoint: opts.endpoint,
        aadCredentials: credential
      });
      const {
        database
      } = await cosmosClient.databases.createIfNotExists({
        id: opts.databaseName || "unstorage"
      });
      const {
        container
      } = await database.containers.createIfNotExists({
        id: opts.containerName || "unstorage"
      });
      client = container;
    }
    return client;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getCosmosClient,
    async hasItem(key) {
      const item = await (await getCosmosClient()).item(key).read();
      return item.resource ? true : false;
    },
    async getItem(key) {
      const item = await (await getCosmosClient()).item(key).read();
      return item.resource ? item.resource.value : null;
    },
    async setItem(key, value) {
      const modified = /* @__PURE__ */ new Date();
      await (await getCosmosClient()).items.upsert({
        id: key,
        value,
        modified
      }, {
        consistencyLevel: "Session"
      });
    },
    async removeItem(key) {
      await (await getCosmosClient()).item(key).delete({
        consistencyLevel: "Session"
      });
    },
    async getKeys() {
      const iterator = (await getCosmosClient()).items.query(`SELECT { id } from c`);
      return (await iterator.fetchAll()).resources.map(item => item.id);
    },
    async getMeta(key) {
      const item = await (await getCosmosClient()).item(key).read();
      return {
        mtime: item.resource?.modified ? new Date(item.resource.modified) : void 0
      };
    },
    async clear() {
      const iterator = (await getCosmosClient()).items.query(`SELECT { id } from c`);
      const items = (await iterator.fetchAll()).resources;
      for (const item of items) {
        await (await getCosmosClient()).item(item.id).delete({
          consistencyLevel: "Session"
        });
      }
    }
  };
});
37
node_modules/unstorage/drivers/azure-cosmos.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
import { Container } from "@azure/cosmos";
export interface AzureCosmosOptions {
  /**
   * CosmosDB endpoint in the format of https://<account>.documents.azure.com:443/.
   */
  endpoint: string;
  /**
   * CosmosDB account key. If not provided, the driver will use the DefaultAzureCredential (recommended).
   */
  accountKey?: string;
  /**
   * The name of the database to use. Defaults to `unstorage`.
   * @default "unstorage"
   */
  databaseName?: string;
  /**
   * The name of the container to use. Defaults to `unstorage`.
   * @default "unstorage"
   */
  containerName?: string;
}
export interface AzureCosmosItem {
  /**
   * The unstorage key as id of the item.
   */
  id: string;
  /**
   * The unstorage value of the item.
   */
  value: string;
  /**
   * The unstorage mtime metadata of the item.
   */
  modified: string | Date;
}
declare const _default: (opts: AzureCosmosOptions) => import("..").Driver<AzureCosmosOptions, Promise<Container>>;
export default _default;
86
node_modules/unstorage/drivers/azure-cosmos.mjs
generated
vendored
Normal file
@@ -0,0 +1,86 @@
import { createRequiredError, defineDriver } from "./utils/index.mjs";
import { CosmosClient } from "@azure/cosmos";
import { DefaultAzureCredential } from "@azure/identity";
const DRIVER_NAME = "azure-cosmos";
export default defineDriver((opts) => {
  let client;
  const getCosmosClient = async () => {
    if (client) {
      return client;
    }
    if (!opts.endpoint) {
      throw createRequiredError(DRIVER_NAME, "endpoint");
    }
    if (opts.accountKey) {
      const cosmosClient = new CosmosClient({
        endpoint: opts.endpoint,
        key: opts.accountKey
      });
      const { database } = await cosmosClient.databases.createIfNotExists({
        id: opts.databaseName || "unstorage"
      });
      const { container } = await database.containers.createIfNotExists({
        id: opts.containerName || "unstorage"
      });
      client = container;
    } else {
      const credential = new DefaultAzureCredential();
      const cosmosClient = new CosmosClient({
        endpoint: opts.endpoint,
        aadCredentials: credential
      });
      const { database } = await cosmosClient.databases.createIfNotExists({
        id: opts.databaseName || "unstorage"
      });
      const { container } = await database.containers.createIfNotExists({
        id: opts.containerName || "unstorage"
      });
      client = container;
    }
    return client;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getCosmosClient,
    async hasItem(key) {
      const item = await (await getCosmosClient()).item(key).read();
      return item.resource ? true : false;
    },
    async getItem(key) {
      const item = await (await getCosmosClient()).item(key).read();
      return item.resource ? item.resource.value : null;
    },
    async setItem(key, value) {
      const modified = /* @__PURE__ */ new Date();
      await (await getCosmosClient()).items.upsert(
        { id: key, value, modified },
        { consistencyLevel: "Session" }
      );
    },
    async removeItem(key) {
      await (await getCosmosClient()).item(key).delete({ consistencyLevel: "Session" });
    },
    async getKeys() {
      const iterator = (await getCosmosClient()).items.query(
        `SELECT { id } from c`
      );
      return (await iterator.fetchAll()).resources.map((item) => item.id);
    },
    async getMeta(key) {
      const item = await (await getCosmosClient()).item(key).read();
      return {
        mtime: item.resource?.modified ? new Date(item.resource.modified) : void 0
      };
    },
    async clear() {
      const iterator = (await getCosmosClient()).items.query(
        `SELECT { id } from c`
      );
      const items = (await iterator.fetchAll()).resources;
      for (const item of items) {
        await (await getCosmosClient()).item(item.id).delete({ consistencyLevel: "Session" });
      }
    }
  };
});
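
A minimal usage sketch, assuming unstorage's createStorage API; the endpoint is a placeholder:

// Not part of the commit. With no accountKey, the driver uses
// DefaultAzureCredential; the "unstorage" database and container are
// created on first access if they do not exist.
import { createStorage } from "unstorage";
import azureCosmos from "unstorage/drivers/azure-cosmos";

const storage = createStorage({
  driver: azureCosmos({
    endpoint: "https://my-account.documents.azure.com:443/" // placeholder
  })
});

await storage.setItem("session:1", "data"); // upserted as { id, value, modified }
const mtime = (await storage.getMeta("session:1")).mtime; // from the modified field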
116
node_modules/unstorage/drivers/azure-key-vault.cjs
generated
vendored
Normal file
@@ -0,0 +1,116 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _keyvaultSecrets = require("@azure/keyvault-secrets");
var _identity = require("@azure/identity");
const DRIVER_NAME = "azure-key-vault";
module.exports = (0, _utils.defineDriver)(opts => {
  let keyVaultClient;
  const getKeyVaultClient = () => {
    if (keyVaultClient) {
      return keyVaultClient;
    }
    const {
      vaultName = null,
      serviceVersion = "7.3",
      pageSize = 25
    } = opts;
    if (!vaultName) {
      throw (0, _utils.createRequiredError)(DRIVER_NAME, "vaultName");
    }
    if (pageSize > 25) {
      throw (0, _utils.createError)(DRIVER_NAME, "`pageSize` cannot be greater than `25`");
    }
    const credential = new _identity.DefaultAzureCredential();
    const url = `https://${vaultName}.vault.azure.net`;
    keyVaultClient = new _keyvaultSecrets.SecretClient(url, credential, {
      serviceVersion
    });
    return keyVaultClient;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getKeyVaultClient,
    async hasItem(key) {
      try {
        await getKeyVaultClient().getSecret(encode(key));
        return true;
      } catch {
        return false;
      }
    },
    async getItem(key) {
      try {
        const secret = await getKeyVaultClient().getSecret(encode(key));
        return secret.value;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      await getKeyVaultClient().setSecret(encode(key), value);
    },
    async removeItem(key) {
      const poller = await getKeyVaultClient().beginDeleteSecret(encode(key));
      await poller.pollUntilDone();
      await getKeyVaultClient().purgeDeletedSecret(encode(key));
    },
    async getKeys() {
      const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({
        maxPageSize: opts.pageSize || 25
      });
      const keys = [];
      for await (const page of secrets) {
        const pageKeys = page.map(secret => decode(secret.name));
        keys.push(...pageKeys);
      }
      return keys;
    },
    async getMeta(key) {
      const secret = await getKeyVaultClient().getSecret(encode(key));
      return {
        mtime: secret.properties.updatedOn,
        birthtime: secret.properties.createdOn,
        expireTime: secret.properties.expiresOn
      };
    },
    async clear() {
      const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({
        maxPageSize: opts.pageSize || 25
      });
      for await (const page of secrets) {
        const deletionPromises = page.map(async secret => {
          const poller = await getKeyVaultClient().beginDeleteSecret(secret.name);
          await poller.pollUntilDone();
          await getKeyVaultClient().purgeDeletedSecret(secret.name);
        });
        await Promise.all(deletionPromises);
      }
    }
  };
});
const base64Map = {
  "=": "-e-",
  "+": "-p-",
  "/": "-s-"
};
function encode(value) {
  let encoded = Buffer.from(value).toString("base64");
  for (const key in base64Map) {
    encoded = encoded.replace(new RegExp(key.replace(/[$()*+.?[\\\]^{|}]/g, "\\$&"), "g"), base64Map[key]);
  }
  return encoded;
}
function decode(value) {
  let decoded = value;
  const search = new RegExp(Object.values(base64Map).join("|"), "g");
  decoded = decoded.replace(search, match => {
    return Object.keys(base64Map).find(key => base64Map[key] === match);
  });
  return Buffer.from(decoded, "base64").toString();
}
19
node_modules/unstorage/drivers/azure-key-vault.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
import { SecretClient, type SecretClientOptions } from "@azure/keyvault-secrets";
export interface AzureKeyVaultOptions {
  /**
   * The name of the key vault to use.
   */
  vaultName: string;
  /**
   * Version of the Azure Key Vault service to use. Defaults to 7.3.
   * @default '7.3'
   */
  serviceVersion?: SecretClientOptions["serviceVersion"];
  /**
   * The number of entries to retrieve per request. Impacts getKeys() and clear() performance. Maximum value is 25.
   * @default 25
   */
  pageSize?: number;
}
declare const _default: (opts: AzureKeyVaultOptions) => import("..").Driver<AzureKeyVaultOptions, SecretClient>;
export default _default;
107
node_modules/unstorage/drivers/azure-key-vault.mjs
generated
vendored
Normal file
@@ -0,0 +1,107 @@
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
import {
  SecretClient
} from "@azure/keyvault-secrets";
import { DefaultAzureCredential } from "@azure/identity";
const DRIVER_NAME = "azure-key-vault";
export default defineDriver((opts) => {
  let keyVaultClient;
  const getKeyVaultClient = () => {
    if (keyVaultClient) {
      return keyVaultClient;
    }
    const { vaultName = null, serviceVersion = "7.3", pageSize = 25 } = opts;
    if (!vaultName) {
      throw createRequiredError(DRIVER_NAME, "vaultName");
    }
    if (pageSize > 25) {
      throw createError(DRIVER_NAME, "`pageSize` cannot be greater than `25`");
    }
    const credential = new DefaultAzureCredential();
    const url = `https://${vaultName}.vault.azure.net`;
    keyVaultClient = new SecretClient(url, credential, { serviceVersion });
    return keyVaultClient;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getKeyVaultClient,
    async hasItem(key) {
      try {
        await getKeyVaultClient().getSecret(encode(key));
        return true;
      } catch {
        return false;
      }
    },
    async getItem(key) {
      try {
        const secret = await getKeyVaultClient().getSecret(encode(key));
        return secret.value;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      await getKeyVaultClient().setSecret(encode(key), value);
    },
    async removeItem(key) {
      const poller = await getKeyVaultClient().beginDeleteSecret(encode(key));
      await poller.pollUntilDone();
      await getKeyVaultClient().purgeDeletedSecret(encode(key));
    },
    async getKeys() {
      const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({ maxPageSize: opts.pageSize || 25 });
      const keys = [];
      for await (const page of secrets) {
        const pageKeys = page.map((secret) => decode(secret.name));
        keys.push(...pageKeys);
      }
      return keys;
    },
    async getMeta(key) {
      const secret = await getKeyVaultClient().getSecret(encode(key));
      return {
        mtime: secret.properties.updatedOn,
        birthtime: secret.properties.createdOn,
        expireTime: secret.properties.expiresOn
      };
    },
    async clear() {
      const secrets = getKeyVaultClient().listPropertiesOfSecrets().byPage({ maxPageSize: opts.pageSize || 25 });
      for await (const page of secrets) {
        const deletionPromises = page.map(async (secret) => {
          const poller = await getKeyVaultClient().beginDeleteSecret(
            secret.name
          );
          await poller.pollUntilDone();
          await getKeyVaultClient().purgeDeletedSecret(secret.name);
        });
        await Promise.all(deletionPromises);
      }
    }
  };
});
const base64Map = {
  "=": "-e-",
  "+": "-p-",
  "/": "-s-"
};
function encode(value) {
  let encoded = Buffer.from(value).toString("base64");
  for (const key in base64Map) {
    encoded = encoded.replace(
      new RegExp(key.replace(/[$()*+.?[\\\]^{|}]/g, "\\$&"), "g"),
      base64Map[key]
    );
  }
  return encoded;
}
function decode(value) {
  let decoded = value;
  const search = new RegExp(Object.values(base64Map).join("|"), "g");
  decoded = decoded.replace(search, (match) => {
    return Object.keys(base64Map).find((key) => base64Map[key] === match);
  });
  return Buffer.from(decoded, "base64").toString();
}
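
The encode()/decode() helpers above exist because Key Vault secret names may only contain alphanumeric characters and dashes. A short round-trip illustration of the helpers defined in this file; the key "cache:user:1" is a made-up example:

// Keys are base64-encoded, then the three base64 characters that are invalid
// in secret names are mapped: "=" -> "-e-", "+" -> "-p-", "/" -> "-s-".
encode("cache:user:1"); // "Y2FjaGU6dXNlcjox" (this input happens to need no padding)
decode("Y2FjaGU6dXNlcjox"); // "cache:user:1"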
141
node_modules/unstorage/drivers/azure-storage-blob.cjs
generated
vendored
Normal file
@@ -0,0 +1,141 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _storageBlob = require("@azure/storage-blob");
var _identity = require("@azure/identity");
const DRIVER_NAME = "azure-storage-blob";
module.exports = (0, _utils.defineDriver)(opts => {
  let containerClient;
  const endpointSuffix = opts.endpointSuffix || ".blob.core.windows.net";
  const getContainerClient = () => {
    if (containerClient) {
      return containerClient;
    }
    if (!opts.connectionString && !opts.sasUrl && !opts.accountName) {
      throw (0, _utils.createError)(DRIVER_NAME, "missing accountName");
    }
    let serviceClient;
    if (opts.accountKey) {
      const credential = new _storageBlob.StorageSharedKeyCredential(opts.accountName, opts.accountKey);
      serviceClient = new _storageBlob.BlobServiceClient(`https://${opts.accountName}${endpointSuffix}`, credential);
    } else if (opts.sasUrl) {
      if (opts.containerName && opts.sasUrl.includes(`${opts.containerName}?`)) {
        containerClient = new _storageBlob.ContainerClient(`${opts.sasUrl}`);
        return containerClient;
      }
      serviceClient = new _storageBlob.BlobServiceClient(opts.sasUrl);
    } else if (opts.sasKey) {
      if (opts.containerName) {
        containerClient = new _storageBlob.ContainerClient(`https://${opts.accountName}${endpointSuffix}/${opts.containerName}?${opts.sasKey}`);
        return containerClient;
      }
      serviceClient = new _storageBlob.BlobServiceClient(`https://${opts.accountName}${endpointSuffix}?${opts.sasKey}`);
    } else if (opts.connectionString) {
      serviceClient = _storageBlob.BlobServiceClient.fromConnectionString(opts.connectionString);
    } else {
      const credential = new _identity.DefaultAzureCredential();
      serviceClient = new _storageBlob.BlobServiceClient(`https://${opts.accountName}${endpointSuffix}`, credential);
    }
    containerClient = serviceClient.getContainerClient(opts.containerName || "unstorage");
    containerClient.createIfNotExists();
    return containerClient;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getContainerClient,
    async hasItem(key) {
      return await getContainerClient().getBlockBlobClient(key).exists();
    },
    async getItem(key) {
      try {
        const blob = await getContainerClient().getBlockBlobClient(key).download();
        if (isBrowser) {
          return blob.blobBody ? await blobToString(await blob.blobBody) : null;
        }
        return blob.readableStreamBody ? (await streamToBuffer(blob.readableStreamBody)).toString() : null;
      } catch {
        return null;
      }
    },
    async getItemRaw(key) {
      try {
        const blob = await getContainerClient().getBlockBlobClient(key).download();
        if (isBrowser) {
          return blob.blobBody ? await blobToString(await blob.blobBody) : null;
        }
        return blob.readableStreamBody ? await streamToBuffer(blob.readableStreamBody) : null;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
    },
    async setItemRaw(key, value) {
      await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
    },
    async removeItem(key) {
      await getContainerClient().getBlockBlobClient(key).deleteIfExists({
        deleteSnapshots: "include"
      });
    },
    async getKeys() {
      const iterator = getContainerClient().listBlobsFlat().byPage({
        maxPageSize: 1e3
      });
      const keys = [];
      for await (const page of iterator) {
        const pageKeys = page.segment.blobItems.map(blob => blob.name);
        keys.push(...pageKeys);
      }
      return keys;
    },
    async getMeta(key) {
      const blobProperties = await getContainerClient().getBlockBlobClient(key).getProperties();
      return {
        mtime: blobProperties.lastModified,
        atime: blobProperties.lastAccessed,
        cr: blobProperties.createdOn,
        ...blobProperties.metadata
      };
    },
    async clear() {
      const iterator = getContainerClient().listBlobsFlat().byPage({
        maxPageSize: 1e3
      });
      for await (const page of iterator) {
        await Promise.all(page.segment.blobItems.map(async blob => await getContainerClient().deleteBlob(blob.name, {
          deleteSnapshots: "include"
        })));
      }
    }
  };
});
const isBrowser = typeof window !== "undefined";
async function streamToBuffer(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on("data", data => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data));
    });
    readableStream.on("end", () => {
      resolve(Buffer.concat(chunks));
    });
    readableStream.on("error", reject);
  });
}
async function blobToString(blob) {
  const fileReader = new FileReader();
  return new Promise((resolve, reject) => {
    fileReader.onloadend = ev => {
      resolve(ev.target?.result);
    };
    fileReader.onerror = reject;
    fileReader.readAsText(blob);
  });
}
35
node_modules/unstorage/drivers/azure-storage-blob.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
import { ContainerClient } from "@azure/storage-blob";
export interface AzureStorageBlobOptions {
  /**
   * The name of the Azure Storage account.
   */
  accountName?: string;
  /**
   * The name of the storage container. All entities will be stored in the same container. Will be created if it doesn't exist.
   * @default "unstorage"
   */
  containerName?: string;
  /**
   * The account key. If provided, the SAS key will be ignored. Only available in Node.js runtime.
   */
  accountKey?: string;
  /**
   * The SAS token. If provided, the account key will be ignored. Include at least read, list and write permissions to be able to list keys.
   */
  sasKey?: string;
  /**
   * The SAS URL. If provided, the account key, SAS key and container name will be ignored.
   */
  sasUrl?: string;
  /**
   * The connection string. If provided, the account key and SAS key will be ignored. Only available in Node.js runtime.
   */
  connectionString?: string;
  /**
   * Storage account endpoint suffix. Needs to be changed for Microsoft Azure operated by 21Vianet, Azure Government or Azurite.
   * @default ".blob.core.windows.net"
   */
  endpointSuffix?: string;
}
declare const _default: (opts: AzureStorageBlobOptions) => import("..").Driver<AzureStorageBlobOptions, ContainerClient>;
export default _default;
154
node_modules/unstorage/drivers/azure-storage-blob.mjs
generated
vendored
Normal file
@@ -0,0 +1,154 @@
import { createError, defineDriver } from "./utils/index.mjs";
import {
  BlobServiceClient,
  ContainerClient,
  StorageSharedKeyCredential
} from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";
const DRIVER_NAME = "azure-storage-blob";
export default defineDriver((opts) => {
  let containerClient;
  const endpointSuffix = opts.endpointSuffix || ".blob.core.windows.net";
  const getContainerClient = () => {
    if (containerClient) {
      return containerClient;
    }
    if (!opts.connectionString && !opts.sasUrl && !opts.accountName) {
      throw createError(DRIVER_NAME, "missing accountName");
    }
    let serviceClient;
    if (opts.accountKey) {
      const credential = new StorageSharedKeyCredential(
        opts.accountName,
        opts.accountKey
      );
      serviceClient = new BlobServiceClient(
        `https://${opts.accountName}${endpointSuffix}`,
        credential
      );
    } else if (opts.sasUrl) {
      if (opts.containerName && opts.sasUrl.includes(`${opts.containerName}?`)) {
        containerClient = new ContainerClient(`${opts.sasUrl}`);
        return containerClient;
      }
      serviceClient = new BlobServiceClient(opts.sasUrl);
    } else if (opts.sasKey) {
      if (opts.containerName) {
        containerClient = new ContainerClient(
          `https://${opts.accountName}${endpointSuffix}/${opts.containerName}?${opts.sasKey}`
        );
        return containerClient;
      }
      serviceClient = new BlobServiceClient(
        `https://${opts.accountName}${endpointSuffix}?${opts.sasKey}`
      );
    } else if (opts.connectionString) {
      serviceClient = BlobServiceClient.fromConnectionString(
        opts.connectionString
      );
    } else {
      const credential = new DefaultAzureCredential();
      serviceClient = new BlobServiceClient(
        `https://${opts.accountName}${endpointSuffix}`,
        credential
      );
    }
    containerClient = serviceClient.getContainerClient(
      opts.containerName || "unstorage"
    );
    containerClient.createIfNotExists();
    return containerClient;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getContainerClient,
    async hasItem(key) {
      return await getContainerClient().getBlockBlobClient(key).exists();
    },
    async getItem(key) {
      try {
        const blob = await getContainerClient().getBlockBlobClient(key).download();
        if (isBrowser) {
          return blob.blobBody ? await blobToString(await blob.blobBody) : null;
        }
        return blob.readableStreamBody ? (await streamToBuffer(blob.readableStreamBody)).toString() : null;
      } catch {
        return null;
      }
    },
    async getItemRaw(key) {
      try {
        const blob = await getContainerClient().getBlockBlobClient(key).download();
        if (isBrowser) {
          return blob.blobBody ? await blobToString(await blob.blobBody) : null;
        }
        return blob.readableStreamBody ? await streamToBuffer(blob.readableStreamBody) : null;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
    },
    async setItemRaw(key, value) {
      await getContainerClient().getBlockBlobClient(key).upload(value, Buffer.byteLength(value));
    },
    async removeItem(key) {
      await getContainerClient().getBlockBlobClient(key).deleteIfExists({ deleteSnapshots: "include" });
    },
    async getKeys() {
      const iterator = getContainerClient().listBlobsFlat().byPage({ maxPageSize: 1e3 });
      const keys = [];
      for await (const page of iterator) {
        const pageKeys = page.segment.blobItems.map((blob) => blob.name);
        keys.push(...pageKeys);
      }
      return keys;
    },
    async getMeta(key) {
      const blobProperties = await getContainerClient().getBlockBlobClient(key).getProperties();
      return {
        mtime: blobProperties.lastModified,
        atime: blobProperties.lastAccessed,
        cr: blobProperties.createdOn,
        ...blobProperties.metadata
      };
    },
    async clear() {
      const iterator = getContainerClient().listBlobsFlat().byPage({ maxPageSize: 1e3 });
      for await (const page of iterator) {
        await Promise.all(
          page.segment.blobItems.map(
            async (blob) => await getContainerClient().deleteBlob(blob.name, {
              deleteSnapshots: "include"
            })
          )
        );
      }
    }
  };
});
const isBrowser = typeof window !== "undefined";
async function streamToBuffer(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on("data", (data) => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data));
    });
    readableStream.on("end", () => {
      resolve(Buffer.concat(chunks));
    });
    readableStream.on("error", reject);
  });
}
async function blobToString(blob) {
  const fileReader = new FileReader();
  return new Promise((resolve, reject) => {
    fileReader.onloadend = (ev) => {
      resolve(ev.target?.result);
    };
    fileReader.onerror = reject;
    fileReader.readAsText(blob);
  });
}
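
A minimal usage sketch, assuming unstorage's createStorage API; account and container names are placeholders. Omitting accountKey, sasKey, sasUrl and connectionString falls through to DefaultAzureCredential:

// Not part of the commit; illustrates the options documented in the .d.ts above.
import { createStorage } from "unstorage";
import azureStorageBlob from "unstorage/drivers/azure-storage-blob";

const storage = createStorage({
  driver: azureStorageBlob({
    accountName: "mystorageaccount", // placeholder
    containerName: "unstorage" // created if it doesn't exist
  })
});

await storage.setItemRaw("img/logo.png", Buffer.from([137, 80, 78, 71]));
const buf = await storage.getItemRaw("img/logo.png"); // a Buffer in Node.js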
108
node_modules/unstorage/drivers/azure-storage-table.cjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _dataTables = require("@azure/data-tables");
var _identity = require("@azure/identity");
const DRIVER_NAME = "azure-storage-table";
module.exports = (0, _utils.defineDriver)(opts => {
  const {
    accountName = null,
    tableName = "unstorage",
    partitionKey = "unstorage",
    accountKey = null,
    sasKey = null,
    connectionString = null,
    pageSize = 1e3
  } = opts;
  let client;
  const getClient = () => {
    if (client) {
      return client;
    }
    if (!accountName) {
      throw (0, _utils.createRequiredError)(DRIVER_NAME, "accountName");
    }
    if (pageSize > 1e3) {
      throw (0, _utils.createError)(DRIVER_NAME, "`pageSize` exceeds the maximum allowed value of `1000`");
    }
    if (accountKey) {
      const credential = new _dataTables.AzureNamedKeyCredential(accountName, accountKey);
      client = new _dataTables.TableClient(`https://${accountName}.table.core.windows.net`, tableName, credential);
    } else if (sasKey) {
      const credential = new _dataTables.AzureSASCredential(sasKey);
      client = new _dataTables.TableClient(`https://${accountName}.table.core.windows.net`, tableName, credential);
    } else if (connectionString) {
      client = _dataTables.TableClient.fromConnectionString(connectionString, tableName);
    } else {
      const credential = new _identity.DefaultAzureCredential();
      client = new _dataTables.TableClient(`https://${accountName}.table.core.windows.net`, tableName, credential);
    }
    return client;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getClient,
    async hasItem(key) {
      try {
        await getClient().getEntity(partitionKey, key);
        return true;
      } catch {
        return false;
      }
    },
    async getItem(key) {
      try {
        const entity = await getClient().getEntity(partitionKey, key);
        return entity.unstorageValue;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      const entity = {
        partitionKey,
        rowKey: key,
        unstorageValue: value
      };
      await getClient().upsertEntity(entity, "Replace");
    },
    async removeItem(key) {
      await getClient().deleteEntity(partitionKey, key);
    },
    async getKeys() {
      const iterator = getClient().listEntities().byPage({
        maxPageSize: pageSize
      });
      const keys = [];
      for await (const page of iterator) {
        const pageKeys = page.map(entity => entity.rowKey).filter(Boolean);
        keys.push(...pageKeys);
      }
      return keys;
    },
    async getMeta(key) {
      const entity = await getClient().getEntity(partitionKey, key);
      return {
        mtime: entity.timestamp ? new Date(entity.timestamp) : void 0,
        etag: entity.etag
      };
    },
    async clear() {
      const iterator = getClient().listEntities().byPage({
        maxPageSize: pageSize
      });
      for await (const page of iterator) {
        await Promise.all(page.map(async entity => {
          if (entity.partitionKey && entity.rowKey) {
            await getClient().deleteEntity(entity.partitionKey, entity.rowKey);
          }
        }));
      }
    }
  };
});
36
node_modules/unstorage/drivers/azure-storage-table.d.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
import { TableClient } from "@azure/data-tables";
export interface AzureStorageTableOptions {
  /**
   * The name of the Azure Storage account.
   */
  accountName: string;
  /**
   * The name of the table. All entities will be stored in the same table.
   * @default 'unstorage'
   */
  tableName?: string;
  /**
   * The partition key. All entities will be stored in the same partition.
   * @default 'unstorage'
   */
  partitionKey?: string;
  /**
   * The account key. If provided, the SAS key will be ignored. Only available in Node.js runtime.
   */
  accountKey?: string;
  /**
   * The SAS key. If provided, the account key will be ignored.
   */
  sasKey?: string;
  /**
   * The connection string. If provided, the account key and SAS key will be ignored. Only available in Node.js runtime.
   */
  connectionString?: string;
  /**
   * The number of entries to retrieve per request. Impacts getKeys() and clear() performance. Maximum value is 1000.
   * @default 1000
   */
  pageSize?: number;
}
declare const _default: (opts: AzureStorageTableOptions) => import("..").Driver<AzureStorageTableOptions, TableClient>;
export default _default;
122
node_modules/unstorage/drivers/azure-storage-table.mjs
generated
vendored
Normal file
@@ -0,0 +1,122 @@
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
import {
  TableClient,
  AzureNamedKeyCredential,
  AzureSASCredential
} from "@azure/data-tables";
import { DefaultAzureCredential } from "@azure/identity";
const DRIVER_NAME = "azure-storage-table";
export default defineDriver((opts) => {
  const {
    accountName = null,
    tableName = "unstorage",
    partitionKey = "unstorage",
    accountKey = null,
    sasKey = null,
    connectionString = null,
    pageSize = 1e3
  } = opts;
  let client;
  const getClient = () => {
    if (client) {
      return client;
    }
    if (!accountName) {
      throw createRequiredError(DRIVER_NAME, "accountName");
    }
    if (pageSize > 1e3) {
      throw createError(
        DRIVER_NAME,
        "`pageSize` exceeds the maximum allowed value of `1000`"
      );
    }
    if (accountKey) {
      const credential = new AzureNamedKeyCredential(accountName, accountKey);
      client = new TableClient(
        `https://${accountName}.table.core.windows.net`,
        tableName,
        credential
      );
    } else if (sasKey) {
      const credential = new AzureSASCredential(sasKey);
      client = new TableClient(
        `https://${accountName}.table.core.windows.net`,
        tableName,
        credential
      );
    } else if (connectionString) {
      client = TableClient.fromConnectionString(connectionString, tableName);
    } else {
      const credential = new DefaultAzureCredential();
      client = new TableClient(
        `https://${accountName}.table.core.windows.net`,
        tableName,
        credential
      );
    }
    return client;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getClient,
    async hasItem(key) {
      try {
        await getClient().getEntity(partitionKey, key);
        return true;
      } catch {
        return false;
      }
    },
    async getItem(key) {
      try {
        const entity = await getClient().getEntity(partitionKey, key);
        return entity.unstorageValue;
      } catch {
        return null;
      }
    },
    async setItem(key, value) {
      const entity = {
        partitionKey,
        rowKey: key,
        unstorageValue: value
      };
      await getClient().upsertEntity(entity, "Replace");
    },
    async removeItem(key) {
      await getClient().deleteEntity(partitionKey, key);
    },
    async getKeys() {
      const iterator = getClient().listEntities().byPage({ maxPageSize: pageSize });
      const keys = [];
      for await (const page of iterator) {
        const pageKeys = page.map((entity) => entity.rowKey).filter(Boolean);
        keys.push(...pageKeys);
      }
      return keys;
    },
    async getMeta(key) {
      const entity = await getClient().getEntity(partitionKey, key);
      return {
        mtime: entity.timestamp ? new Date(entity.timestamp) : void 0,
        etag: entity.etag
      };
    },
    async clear() {
      const iterator = getClient().listEntities().byPage({ maxPageSize: pageSize });
      for await (const page of iterator) {
        await Promise.all(
          page.map(async (entity) => {
            if (entity.partitionKey && entity.rowKey) {
              await getClient().deleteEntity(
                entity.partitionKey,
                entity.rowKey
              );
            }
          })
        );
      }
    }
  };
});
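
A minimal usage sketch, assuming unstorage's createStorage API; the account name is a placeholder. Every key becomes a rowKey under the single "unstorage" partition, and the value is stored in the unstorageValue property:

// Not part of the commit; uses DefaultAzureCredential when no key is given.
import { createStorage } from "unstorage";
import azureStorageTable from "unstorage/drivers/azure-storage-table";

const storage = createStorage({
  driver: azureStorageTable({
    accountName: "mystorageaccount" // placeholder
  })
});

await storage.setItem("user:1", JSON.stringify({ name: "Ada" }));
const meta = await storage.getMeta("user:1"); // { mtime, etag } from the entity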
63
node_modules/unstorage/drivers/capacitor-preferences.cjs
generated
vendored
Normal file
@@ -0,0 +1,63 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _preferences = require("@capacitor/preferences");
var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "capacitor-preferences";
module.exports = (0, _utils.defineDriver)(opts => {
  const base = (0, _utils.normalizeKey)(opts?.base || "");
  const resolveKey = key => (0, _utils.joinKeys)(base, key);
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => _preferences.Preferences,
    hasItem(key) {
      return _preferences.Preferences.keys().then(r => r.keys.includes(resolveKey(key)));
    },
    getItem(key) {
      return _preferences.Preferences.get({
        key: resolveKey(key)
      }).then(r => r.value);
    },
    getItemRaw(key) {
      return _preferences.Preferences.get({
        key: resolveKey(key)
      }).then(r => r.value);
    },
    setItem(key, value) {
      return _preferences.Preferences.set({
        key: resolveKey(key),
        value
      });
    },
    setItemRaw(key, value) {
      return _preferences.Preferences.set({
        key: resolveKey(key),
        value
      });
    },
    removeItem(key) {
      return _preferences.Preferences.remove({
        key: resolveKey(key)
      });
    },
    async getKeys() {
      const {
        keys
      } = await _preferences.Preferences.keys();
      return keys.map(key => key.slice(base.length));
    },
    async clear(prefix) {
      const {
        keys
      } = await _preferences.Preferences.keys();
      const _prefix = resolveKey(prefix || "");
      await Promise.all(keys.filter(key => key.startsWith(_prefix)).map(key => _preferences.Preferences.remove({
        key
      })));
    }
  };
});
5
node_modules/unstorage/drivers/capacitor-preferences.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
export interface CapacitorPreferencesOptions {
  base?: string;
}
declare const _default: (opts: CapacitorPreferencesOptions) => import("..").Driver<CapacitorPreferencesOptions, import("@capacitor/preferences").PreferencesPlugin>;
export default _default;
43
node_modules/unstorage/drivers/capacitor-preferences.mjs
generated
vendored
Normal file
@@ -0,0 +1,43 @@
import { Preferences } from "@capacitor/preferences";
import { defineDriver, joinKeys, normalizeKey } from "./utils/index.mjs";
const DRIVER_NAME = "capacitor-preferences";
export default defineDriver(
  (opts) => {
    const base = normalizeKey(opts?.base || "");
    const resolveKey = (key) => joinKeys(base, key);
    return {
      name: DRIVER_NAME,
      options: opts,
      getInstance: () => Preferences,
      hasItem(key) {
        return Preferences.keys().then((r) => r.keys.includes(resolveKey(key)));
      },
      getItem(key) {
        return Preferences.get({ key: resolveKey(key) }).then((r) => r.value);
      },
      getItemRaw(key) {
        return Preferences.get({ key: resolveKey(key) }).then((r) => r.value);
      },
      setItem(key, value) {
        return Preferences.set({ key: resolveKey(key), value });
      },
      setItemRaw(key, value) {
        return Preferences.set({ key: resolveKey(key), value });
      },
      removeItem(key) {
        return Preferences.remove({ key: resolveKey(key) });
      },
      async getKeys() {
        const { keys } = await Preferences.keys();
        return keys.map((key) => key.slice(base.length));
      },
      async clear(prefix) {
        const { keys } = await Preferences.keys();
        const _prefix = resolveKey(prefix || "");
        await Promise.all(
          keys.filter((key) => key.startsWith(_prefix)).map((key) => Preferences.remove({ key }))
        );
      }
    };
  }
);
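
A minimal usage sketch for a Capacitor app, assuming unstorage's createStorage API; the base prefix "myapp" is a placeholder:

// Not part of the commit. Keys are stored in the native Preferences plugin
// under "myapp:<key>", and the base prefix is stripped from getKeys() results.
import { createStorage } from "unstorage";
import capacitorPreferences from "unstorage/drivers/capacitor-preferences";

const storage = createStorage({
  driver: capacitorPreferences({ base: "myapp" })
});

await storage.setItem("theme", "dark");
const keys = await storage.getKeys();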
63
node_modules/unstorage/drivers/cloudflare-kv-binding.cjs
generated
vendored
Normal file
@@ -0,0 +1,63 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _cloudflare = require("./utils/cloudflare.cjs");
const DRIVER_NAME = "cloudflare-kv-binding";
module.exports = (0, _utils.defineDriver)(opts => {
  const r = (key = "") => opts.base ? (0, _utils.joinKeys)(opts.base, key) : key;
  async function getKeys(base = "") {
    base = r(base);
    const binding = (0, _cloudflare.getKVBinding)(opts.binding);
    const keys = [];
    let cursor = void 0;
    do {
      const kvList = await binding.list({
        prefix: base || void 0,
        cursor
      });
      keys.push(...kvList.keys);
      cursor = kvList.list_complete ? void 0 : kvList.cursor;
    } while (cursor);
    return keys.map(key => key.name);
  }
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => (0, _cloudflare.getKVBinding)(opts.binding),
    async hasItem(key) {
      key = r(key);
      const binding = (0, _cloudflare.getKVBinding)(opts.binding);
      return (await binding.get(key)) !== null;
    },
    getItem(key) {
      key = r(key);
      const binding = (0, _cloudflare.getKVBinding)(opts.binding);
      return binding.get(key);
    },
    setItem(key, value, topts) {
      key = r(key);
      const binding = (0, _cloudflare.getKVBinding)(opts.binding);
      return binding.put(key, value, topts ? {
        expirationTtl: topts?.ttl ? Math.max(topts.ttl, opts.minTTL ?? 60) : void 0,
        ...topts
      } : void 0);
    },
    removeItem(key) {
      key = r(key);
      const binding = (0, _cloudflare.getKVBinding)(opts.binding);
      return binding.delete(key);
    },
    getKeys(base) {
      return getKeys(base).then(keys => keys.map(key => opts.base ? key.slice(opts.base.length) : key));
    },
    async clear(base) {
      const binding = (0, _cloudflare.getKVBinding)(opts.binding);
      const keys = await getKeys(base);
      await Promise.all(keys.map(key => binding.delete(key)));
    }
  };
});
12
node_modules/unstorage/drivers/cloudflare-kv-binding.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
export interface KVOptions {
  binding?: string | KVNamespace;
  /** Adds prefix to all stored keys */
  base?: string;
  /**
   * The minimum time-to-live (ttl) for setItem in seconds.
   * The default is 60 seconds as per Cloudflare's [documentation](https://developers.cloudflare.com/kv/api/write-key-value-pairs/).
   */
  minTTL?: number;
}
declare const _default: (opts: KVOptions) => import("..").Driver<KVOptions, KVNamespace<string>>;
export default _default;
60
node_modules/unstorage/drivers/cloudflare-kv-binding.mjs
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import { defineDriver, joinKeys } from "./utils/index.mjs";
import { getKVBinding } from "./utils/cloudflare.mjs";
const DRIVER_NAME = "cloudflare-kv-binding";
export default defineDriver((opts) => {
  const r = (key = "") => opts.base ? joinKeys(opts.base, key) : key;
  async function getKeys(base = "") {
    base = r(base);
    const binding = getKVBinding(opts.binding);
    const keys = [];
    let cursor = void 0;
    do {
      const kvList = await binding.list({ prefix: base || void 0, cursor });
      keys.push(...kvList.keys);
      cursor = kvList.list_complete ? void 0 : kvList.cursor;
    } while (cursor);
    return keys.map((key) => key.name);
  }
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => getKVBinding(opts.binding),
    async hasItem(key) {
      key = r(key);
      const binding = getKVBinding(opts.binding);
      return await binding.get(key) !== null;
    },
    getItem(key) {
      key = r(key);
      const binding = getKVBinding(opts.binding);
      return binding.get(key);
    },
    setItem(key, value, topts) {
      key = r(key);
      const binding = getKVBinding(opts.binding);
      return binding.put(
        key,
        value,
        topts ? {
          expirationTtl: topts?.ttl ? Math.max(topts.ttl, opts.minTTL ?? 60) : void 0,
          ...topts
        } : void 0
      );
    },
    removeItem(key) {
      key = r(key);
      const binding = getKVBinding(opts.binding);
      return binding.delete(key);
    },
    getKeys(base) {
      return getKeys(base).then(
        (keys) => keys.map((key) => opts.base ? key.slice(opts.base.length) : key)
      );
    },
    async clear(base) {
      const binding = getKVBinding(opts.binding);
      const keys = await getKeys(base);
      await Promise.all(keys.map((key) => binding.delete(key)));
    }
  };
});
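Usage note (not part of the vendored file): a minimal sketch of this driver inside a Cloudflare Worker. The binding name STORAGE and the base prefix "app" are assumptions for illustration; the options (binding, base, minTTL) come from the d.ts above. Note that a ttl below minTTL is clamped upward by the Math.max call in setItem.

// usage-sketch.mjs (illustrative only; assumes a KV namespace bound as STORAGE)
import { createStorage } from "unstorage";
import cloudflareKVBindingDriver from "unstorage/drivers/cloudflare-kv-binding";

export default {
  async fetch(request, env) {
    const storage = createStorage({
      driver: cloudflareKVBindingDriver({ binding: env.STORAGE, base: "app", minTTL: 60 })
    });
    // a ttl of 30 would be raised to the 60-second minimum by the driver
    await storage.setItem("session:abc", "value", { ttl: 30 });
    return new Response((await storage.getItem("session:abc")) ?? "missing");
  }
};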
138
node_modules/unstorage/drivers/cloudflare-kv-http.cjs
generated
vendored
Normal file
@@ -0,0 +1,138 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _ofetch = require("ofetch");
var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "cloudflare-kv-http";
module.exports = (0, _utils.defineDriver)(opts => {
  if (!opts.accountId) {
    throw (0, _utils.createRequiredError)(DRIVER_NAME, "accountId");
  }
  if (!opts.namespaceId) {
    throw (0, _utils.createRequiredError)(DRIVER_NAME, "namespaceId");
  }
  let headers;
  if ("apiToken" in opts) {
    headers = {
      Authorization: `Bearer ${opts.apiToken}`
    };
  } else if ("userServiceKey" in opts) {
    headers = {
      "X-Auth-User-Service-Key": opts.userServiceKey
    };
  } else if (opts.email && opts.apiKey) {
    headers = {
      "X-Auth-Email": opts.email,
      "X-Auth-Key": opts.apiKey
    };
  } else {
    throw (0, _utils.createError)(DRIVER_NAME, "One of the `apiToken`, `userServiceKey`, or a combination of `email` and `apiKey` is required.");
  }
  const apiURL = opts.apiURL || "https://api.cloudflare.com";
  const baseURL = `${apiURL}/client/v4/accounts/${opts.accountId}/storage/kv/namespaces/${opts.namespaceId}`;
  const kvFetch = _ofetch.$fetch.create({
    baseURL,
    headers
  });
  const r = (key = "") => opts.base ? (0, _utils.joinKeys)(opts.base, key) : key;
  const hasItem = async key => {
    try {
      const res = await kvFetch(`/metadata/${r(key)}`);
      return res?.success === true;
    } catch (err) {
      if (!err?.response) {
        throw err;
      }
      if (err?.response?.status === 404) {
        return false;
      }
      throw err;
    }
  };
  const getItem = async key => {
    try {
      return await kvFetch(`/values/${r(key)}`).then(r2 => r2.text());
    } catch (err) {
      if (!err?.response) {
        throw err;
      }
      if (err?.response?.status === 404) {
        return null;
      }
      throw err;
    }
  };
  const setItem = async (key, value, topts) => {
    return await kvFetch(`/values/${r(key)}`, {
      method: "PUT",
      body: value,
      query: topts?.ttl ? {
        expiration_ttl: Math.max(topts?.ttl, opts.minTTL || 60)
      } : void 0
    });
  };
  const removeItem = async key => {
    return await kvFetch(`/values/${r(key)}`, {
      method: "DELETE"
    });
  };
  const getKeys = async base => {
    const keys = [];
    const params = {};
    if (base || opts.base) {
      params.prefix = r(base);
    }
    const firstPage = await kvFetch("/keys", {
      params
    });
    for (const item of firstPage.result) {
      keys.push(item.name);
    }
    const cursor = firstPage.result_info.cursor;
    if (cursor) {
      params.cursor = cursor;
    }
    while (params.cursor) {
      const pageResult = await kvFetch("/keys", {
        params
      });
      for (const item of pageResult.result) {
        keys.push(item.name);
      }
      const pageCursor = pageResult.result_info.cursor;
      params.cursor = pageCursor ? pageCursor : void 0;
    }
    return keys;
  };
  const clear = async () => {
    const keys = await getKeys();
    const chunks = keys.reduce((acc, key, i) => {
      if (i % 1e4 === 0) {
        acc.push([]);
      }
      acc[acc.length - 1].push(key);
      return acc;
    }, [[]]);
    await Promise.all(chunks.map(chunk => {
      if (chunk.length > 0) {
        return kvFetch("/bulk/delete", {
          method: "POST",
          body: chunk
        });
      }
    }));
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    hasItem,
    getItem,
    setItem,
    removeItem,
    getKeys: base => getKeys(base).then(keys => keys.map(key => opts.base ? key.slice(opts.base.length) : key)),
    clear
  };
});
56
node_modules/unstorage/drivers/cloudflare-kv-http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,56 @@
interface KVAuthAPIToken {
  /**
   * API Token generated from the [User Profile 'API Tokens' page](https://dash.cloudflare.com/profile/api-tokens)
   * of the Cloudflare console.
   * @see https://api.cloudflare.com/#getting-started-requests
   */
  apiToken: string;
}
interface KVAuthServiceKey {
  /**
   * A special Cloudflare API key good for a restricted set of endpoints.
   * Always begins with "v1.0-", may vary in length.
   * May be used to authenticate in place of `apiToken` or `apiKey` and `email`.
   * @see https://api.cloudflare.com/#getting-started-requests
   */
  userServiceKey: string;
}
interface KVAuthEmailKey {
  /**
   * Email address associated with your account.
   * Should be used along with `apiKey` to authenticate in place of `apiToken`.
   */
  email: string;
  /**
   * API key generated on the "My Account" page of the Cloudflare console.
   * Should be used along with `email` to authenticate in place of `apiToken`.
   * @see https://api.cloudflare.com/#getting-started-requests
   */
  apiKey: string;
}
export type KVHTTPOptions = {
  /**
   * Cloudflare account ID (required)
   */
  accountId: string;
  /**
   * The ID of the KV namespace to target (required)
   */
  namespaceId: string;
  /**
   * The URL of the Cloudflare API.
   * @default https://api.cloudflare.com
   */
  apiURL?: string;
  /**
   * Adds prefix to all stored keys
   */
  base?: string;
  /**
   * The minimum time-to-live (ttl) for setItem in seconds.
   * The default is 60 seconds as per Cloudflare's [documentation](https://developers.cloudflare.com/kv/api/write-key-value-pairs/).
   */
  minTTL?: number;
} & (KVAuthServiceKey | KVAuthAPIToken | KVAuthEmailKey);
declare const _default: (opts: KVHTTPOptions) => import("..").Driver<KVHTTPOptions, never>;
export default _default;
129
node_modules/unstorage/drivers/cloudflare-kv-http.mjs
generated
vendored
Normal file
@@ -0,0 +1,129 @@
import { $fetch } from "ofetch";
import {
  createError,
  createRequiredError,
  defineDriver,
  joinKeys
} from "./utils/index.mjs";
const DRIVER_NAME = "cloudflare-kv-http";
export default defineDriver((opts) => {
  if (!opts.accountId) {
    throw createRequiredError(DRIVER_NAME, "accountId");
  }
  if (!opts.namespaceId) {
    throw createRequiredError(DRIVER_NAME, "namespaceId");
  }
  let headers;
  if ("apiToken" in opts) {
    headers = { Authorization: `Bearer ${opts.apiToken}` };
  } else if ("userServiceKey" in opts) {
    headers = { "X-Auth-User-Service-Key": opts.userServiceKey };
  } else if (opts.email && opts.apiKey) {
    headers = { "X-Auth-Email": opts.email, "X-Auth-Key": opts.apiKey };
  } else {
    throw createError(
      DRIVER_NAME,
      "One of the `apiToken`, `userServiceKey`, or a combination of `email` and `apiKey` is required."
    );
  }
  const apiURL = opts.apiURL || "https://api.cloudflare.com";
  const baseURL = `${apiURL}/client/v4/accounts/${opts.accountId}/storage/kv/namespaces/${opts.namespaceId}`;
  const kvFetch = $fetch.create({ baseURL, headers });
  const r = (key = "") => opts.base ? joinKeys(opts.base, key) : key;
  const hasItem = async (key) => {
    try {
      const res = await kvFetch(`/metadata/${r(key)}`);
      return res?.success === true;
    } catch (err) {
      if (!err?.response) {
        throw err;
      }
      if (err?.response?.status === 404) {
        return false;
      }
      throw err;
    }
  };
  const getItem = async (key) => {
    try {
      return await kvFetch(`/values/${r(key)}`).then((r2) => r2.text());
    } catch (err) {
      if (!err?.response) {
        throw err;
      }
      if (err?.response?.status === 404) {
        return null;
      }
      throw err;
    }
  };
  const setItem = async (key, value, topts) => {
    return await kvFetch(`/values/${r(key)}`, {
      method: "PUT",
      body: value,
      query: topts?.ttl ? { expiration_ttl: Math.max(topts?.ttl, opts.minTTL || 60) } : void 0
    });
  };
  const removeItem = async (key) => {
    return await kvFetch(`/values/${r(key)}`, { method: "DELETE" });
  };
  const getKeys = async (base) => {
    const keys = [];
    const params = {};
    if (base || opts.base) {
      params.prefix = r(base);
    }
    const firstPage = await kvFetch("/keys", { params });
    for (const item of firstPage.result) {
      keys.push(item.name);
    }
    const cursor = firstPage.result_info.cursor;
    if (cursor) {
      params.cursor = cursor;
    }
    while (params.cursor) {
      const pageResult = await kvFetch("/keys", { params });
      for (const item of pageResult.result) {
        keys.push(item.name);
      }
      const pageCursor = pageResult.result_info.cursor;
      params.cursor = pageCursor ? pageCursor : void 0;
    }
    return keys;
  };
  const clear = async () => {
    const keys = await getKeys();
    const chunks = keys.reduce(
      (acc, key, i) => {
        if (i % 1e4 === 0) {
          acc.push([]);
        }
        acc[acc.length - 1].push(key);
        return acc;
      },
      [[]]
    );
    await Promise.all(
      chunks.map((chunk) => {
        if (chunk.length > 0) {
          return kvFetch("/bulk/delete", {
            method: "POST",
            body: chunk
          });
        }
      })
    );
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    hasItem,
    getItem,
    setItem,
    removeItem,
    getKeys: (base) => getKeys(base).then(
      (keys) => keys.map((key) => opts.base ? key.slice(opts.base.length) : key)
    ),
    clear
  };
});
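Usage note (not part of the vendored file): because this driver talks to the Cloudflare REST API over ofetch, it runs in any fetch-capable runtime, unlike the binding driver. A minimal sketch follows; the environment variable names are assumptions, and apiToken is one of the three auth shapes defined in the d.ts above (apiToken, userServiceKey, or email + apiKey).

// usage-sketch.mjs (illustrative only; env var names are assumptions)
import { createStorage } from "unstorage";
import cloudflareKVHTTPDriver from "unstorage/drivers/cloudflare-kv-http";

const storage = createStorage({
  driver: cloudflareKVHTTPDriver({
    accountId: process.env.CF_ACCOUNT_ID,
    namespaceId: process.env.CF_KV_NAMESPACE_ID,
    apiToken: process.env.CF_API_TOKEN // or { userServiceKey } or { email, apiKey }
  })
});

await storage.setItem("greeting", "hello", { ttl: 120 }); // sent as expiration_ttl
console.log(await storage.getKeys());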
103
node_modules/unstorage/drivers/cloudflare-r2-binding.cjs
generated
vendored
Normal file
@@ -0,0 +1,103 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _cloudflare = require("./utils/cloudflare.cjs");
const DRIVER_NAME = "cloudflare-r2-binding";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  const r = (key = "") => opts.base ? (0, _utils.joinKeys)(opts.base, key) : key;
  const getKeys = async base => {
    const binding = (0, _cloudflare.getR2Binding)(opts.binding);
    const kvList = await binding.list(base || opts.base ? {
      prefix: r(base)
    } : void 0);
    return kvList.objects.map(obj => obj.key);
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => (0, _cloudflare.getR2Binding)(opts.binding),
    async hasItem(key) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      return (await binding.head(key)) !== null;
    },
    async getMeta(key) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      const obj = await binding.head(key);
      if (!obj) return null;
      return {
        mtime: obj.uploaded,
        atime: obj.uploaded,
        ...obj
      };
    },
    getItem(key, topts) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      return binding.get(key, topts).then(r2 => r2?.text() ?? null);
    },
    async getItemRaw(key, topts) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      const object = await binding.get(key, topts);
      return object ? getObjBody(object, topts?.type) : null;
    },
    async setItem(key, value, topts) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      await binding.put(key, value, topts);
    },
    async setItemRaw(key, value, topts) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      await binding.put(key, value, topts);
    },
    async removeItem(key) {
      key = r(key);
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      await binding.delete(key);
    },
    getKeys(base) {
      return getKeys(base).then(keys => opts.base ? keys.map(key => key.slice(opts.base.length)) : keys);
    },
    async clear(base) {
      const binding = (0, _cloudflare.getR2Binding)(opts.binding);
      const keys = await getKeys(base);
      await binding.delete(keys);
    }
  };
});
function getObjBody(object, type) {
  switch (type) {
    case "object": {
      return object;
    }
    case "stream": {
      return object.body;
    }
    case "blob": {
      return object.blob();
    }
    case "arrayBuffer": {
      return object.arrayBuffer();
    }
    case "bytes": {
      return object.arrayBuffer().then(buffer => new Uint8Array(buffer));
    }
    // TODO: Default to bytes in v2
    default: {
      return object.arrayBuffer();
    }
  }
}
6
node_modules/unstorage/drivers/cloudflare-r2-binding.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
export interface CloudflareR2Options {
  binding?: string | R2Bucket;
  base?: string;
}
declare const _default: (opts: CloudflareR2Options | undefined) => import("..").Driver<CloudflareR2Options | undefined, R2Bucket>;
export default _default;
93
node_modules/unstorage/drivers/cloudflare-r2-binding.mjs
generated
vendored
Normal file
@@ -0,0 +1,93 @@
import { defineDriver, joinKeys } from "./utils/index.mjs";
import { getR2Binding } from "./utils/cloudflare.mjs";
const DRIVER_NAME = "cloudflare-r2-binding";
export default defineDriver((opts = {}) => {
  const r = (key = "") => opts.base ? joinKeys(opts.base, key) : key;
  const getKeys = async (base) => {
    const binding = getR2Binding(opts.binding);
    const kvList = await binding.list(
      base || opts.base ? { prefix: r(base) } : void 0
    );
    return kvList.objects.map((obj) => obj.key);
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => getR2Binding(opts.binding),
    async hasItem(key) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      return await binding.head(key) !== null;
    },
    async getMeta(key) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      const obj = await binding.head(key);
      if (!obj) return null;
      return {
        mtime: obj.uploaded,
        atime: obj.uploaded,
        ...obj
      };
    },
    getItem(key, topts) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      return binding.get(key, topts).then((r2) => r2?.text() ?? null);
    },
    async getItemRaw(key, topts) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      const object = await binding.get(key, topts);
      return object ? getObjBody(object, topts?.type) : null;
    },
    async setItem(key, value, topts) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      await binding.put(key, value, topts);
    },
    async setItemRaw(key, value, topts) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      await binding.put(key, value, topts);
    },
    async removeItem(key) {
      key = r(key);
      const binding = getR2Binding(opts.binding);
      await binding.delete(key);
    },
    getKeys(base) {
      return getKeys(base).then(
        (keys) => opts.base ? keys.map((key) => key.slice(opts.base.length)) : keys
      );
    },
    async clear(base) {
      const binding = getR2Binding(opts.binding);
      const keys = await getKeys(base);
      await binding.delete(keys);
    }
  };
});
function getObjBody(object, type) {
  switch (type) {
    case "object": {
      return object;
    }
    case "stream": {
      return object.body;
    }
    case "blob": {
      return object.blob();
    }
    case "arrayBuffer": {
      return object.arrayBuffer();
    }
    case "bytes": {
      return object.arrayBuffer().then((buffer) => new Uint8Array(buffer));
    }
    // TODO: Default to bytes in v2
    default: {
      return object.arrayBuffer();
    }
  }
}
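Usage note (not part of the vendored file): a minimal sketch inside a Worker; the bucket binding name BUCKET and key names are assumptions. getItemRaw accepts a type option that selects a branch of the getObjBody switch above.

// usage-sketch.mjs (illustrative only; assumes an R2 bucket bound as BUCKET)
import { createStorage } from "unstorage";
import cloudflareR2BindingDriver from "unstorage/drivers/cloudflare-r2-binding";

export default {
  async fetch(request, env) {
    const storage = createStorage({
      driver: cloudflareR2BindingDriver({ binding: env.BUCKET, base: "uploads" })
    });
    await storage.setItemRaw("logo.png", new Uint8Array([137, 80, 78, 71]));
    // "bytes" maps to the getObjBody() branch that returns a Uint8Array
    const bytes = await storage.getItemRaw("logo.png", { type: "bytes" });
    return new Response(bytes);
  }
};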
155
node_modules/unstorage/drivers/db0.cjs
generated
vendored
Normal file
@@ -0,0 +1,155 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "db0";
const DEFAULT_TABLE_NAME = "unstorage";
const kExperimentalWarning = "__unstorage_db0_experimental_warning__";
module.exports = (0, _utils.defineDriver)(opts => {
  opts.tableName = opts.tableName || DEFAULT_TABLE_NAME;
  let setupPromise;
  let setupDone = false;
  const ensureTable = () => {
    if (setupDone) {
      return;
    }
    if (!setupPromise) {
      if (!globalThis[kExperimentalWarning]) {
        console.warn("[unstorage]: Database driver is experimental and behavior may change in the future.");
        globalThis[kExperimentalWarning] = true;
      }
      setupPromise = setupTable(opts).then(() => {
        setupDone = true;
        setupPromise = void 0;
      });
    }
    return setupPromise;
  };
  const isMysql = opts.database.dialect === "mysql";
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => opts.database,
    async hasItem(key) {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE \`key\` = ${key}) AS \`value\`` : await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE key = ${key}) AS value`;
      return rows?.[0]?.value == "1";
    },
    getItem: async key => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE key = ${key}`;
      return rows?.[0]?.value ?? null;
    },
    getItemRaw: async key => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT \`blob\` as value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT blob as value FROM {${opts.tableName}} WHERE key = ${key}`;
      return rows?.[0]?.value ?? null;
    },
    setItem: async (key, value) => {
      await ensureTable();
      if (isMysql) {
        await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`value\`, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE value = ${value}, updated_at = CURRENT_TIMESTAMP`;
      } else {
        await opts.database.sql`INSERT INTO {${opts.tableName}} (key, value, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET value = ${value}, updated_at = CURRENT_TIMESTAMP`;
      }
    },
    async setItemRaw(key, value) {
      await ensureTable();
      if (isMysql) {
        const blob = Buffer.from(value);
        await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`blob\`, created_at, updated_at) VALUES (${key}, ${blob}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE \`blob\` = ${blob}, updated_at = CURRENT_TIMESTAMP`;
      } else {
        await opts.database.sql`INSERT INTO {${opts.tableName}} (key, blob, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET blob = ${value}, updated_at = CURRENT_TIMESTAMP`;
      }
    },
    removeItem: async key => {
      await ensureTable();
      if (isMysql) {
        await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE \`key\`=${key}`;
      } else {
        await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE key=${key}`;
      }
    },
    getMeta: async key => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE key = ${key}`;
      return {
        birthtime: toDate(rows?.[0]?.created_at),
        mtime: toDate(rows?.[0]?.updated_at)
      };
    },
    getKeys: async (base = "") => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT \`key\` FROM {${opts.tableName}} WHERE \`key\` LIKE ${base + "%"}` : await opts.database.sql`SELECT key FROM {${opts.tableName}} WHERE key LIKE ${base + "%"}`;
      return rows?.map(r => r.key);
    },
    clear: async () => {
      await ensureTable();
      await opts.database.sql`DELETE FROM {${opts.tableName}}`;
    }
  };
});
async function setupTable(opts) {
  switch (opts.database.dialect) {
    case "sqlite":
    case "libsql": {
      await opts.database.sql`
        CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
          key TEXT PRIMARY KEY,
          value TEXT,
          blob BLOB,
          created_at TEXT DEFAULT CURRENT_TIMESTAMP,
          updated_at TEXT DEFAULT CURRENT_TIMESTAMP
        );
      `;
      return;
    }
    case "postgresql": {
      await opts.database.sql`
        CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
          key VARCHAR(255) NOT NULL PRIMARY KEY,
          value TEXT,
          blob BYTEA,
          created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
          updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
      `;
      return;
    }
    case "mysql": {
      await opts.database.sql`
        CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
          \`key\` VARCHAR(255) NOT NULL PRIMARY KEY,
          \`value\` LONGTEXT,
          \`blob\` BLOB,
          created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
          updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
        );
      `;
      return;
    }
    default: {
      throw (0, _utils.createError)(DRIVER_NAME, `unsupported SQL dialect: ${opts.database.dialect}`);
    }
  }
}
function toDate(timestamp) {
  return timestamp ? new Date(timestamp) : void 0;
}
7
node_modules/unstorage/drivers/db0.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import type { Database } from "db0";
export interface DB0DriverOptions {
  database: Database;
  tableName?: string;
}
declare const _default: (opts: DB0DriverOptions) => import("..").Driver<DB0DriverOptions, Database<import("db0").Connector<unknown>>>;
export default _default;
140
node_modules/unstorage/drivers/db0.mjs
generated
vendored
Normal file
@@ -0,0 +1,140 @@
import { createError, defineDriver } from "./utils/index.mjs";
const DRIVER_NAME = "db0";
const DEFAULT_TABLE_NAME = "unstorage";
const kExperimentalWarning = "__unstorage_db0_experimental_warning__";
export default defineDriver((opts) => {
  opts.tableName = opts.tableName || DEFAULT_TABLE_NAME;
  let setupPromise;
  let setupDone = false;
  const ensureTable = () => {
    if (setupDone) {
      return;
    }
    if (!setupPromise) {
      if (!globalThis[kExperimentalWarning]) {
        console.warn(
          "[unstorage]: Database driver is experimental and behavior may change in the future."
        );
        globalThis[kExperimentalWarning] = true;
      }
      setupPromise = setupTable(opts).then(() => {
        setupDone = true;
        setupPromise = void 0;
      });
    }
    return setupPromise;
  };
  const isMysql = opts.database.dialect === "mysql";
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => opts.database,
    async hasItem(key) {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE \`key\` = ${key}) AS \`value\`` : await opts.database.sql`SELECT EXISTS (SELECT 1 FROM {${opts.tableName}} WHERE key = ${key}) AS value`;
      return rows?.[0]?.value == "1";
    },
    getItem: async (key) => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT value FROM {${opts.tableName}} WHERE key = ${key}`;
      return rows?.[0]?.value ?? null;
    },
    getItemRaw: async (key) => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT \`blob\` as value FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT blob as value FROM {${opts.tableName}} WHERE key = ${key}`;
      return rows?.[0]?.value ?? null;
    },
    setItem: async (key, value) => {
      await ensureTable();
      if (isMysql) {
        await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`value\`, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE value = ${value}, updated_at = CURRENT_TIMESTAMP`;
      } else {
        await opts.database.sql`INSERT INTO {${opts.tableName}} (key, value, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET value = ${value}, updated_at = CURRENT_TIMESTAMP`;
      }
    },
    async setItemRaw(key, value) {
      await ensureTable();
      if (isMysql) {
        const blob = Buffer.from(value);
        await opts.database.sql`INSERT INTO {${opts.tableName}} (\`key\`, \`blob\`, created_at, updated_at) VALUES (${key}, ${blob}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON DUPLICATE KEY UPDATE \`blob\` = ${blob}, updated_at = CURRENT_TIMESTAMP`;
      } else {
        await opts.database.sql`INSERT INTO {${opts.tableName}} (key, blob, created_at, updated_at) VALUES (${key}, ${value}, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP) ON CONFLICT(key) DO UPDATE SET blob = ${value}, updated_at = CURRENT_TIMESTAMP`;
      }
    },
    removeItem: async (key) => {
      await ensureTable();
      if (isMysql) {
        await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE \`key\`=${key}`;
      } else {
        await opts.database.sql`DELETE FROM {${opts.tableName}} WHERE key=${key}`;
      }
    },
    getMeta: async (key) => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE \`key\` = ${key}` : await opts.database.sql`SELECT created_at, updated_at FROM {${opts.tableName}} WHERE key = ${key}`;
      return {
        birthtime: toDate(rows?.[0]?.created_at),
        mtime: toDate(rows?.[0]?.updated_at)
      };
    },
    getKeys: async (base = "") => {
      await ensureTable();
      const { rows } = isMysql ? await opts.database.sql`SELECT \`key\` FROM {${opts.tableName}} WHERE \`key\` LIKE ${base + "%"}` : await opts.database.sql`SELECT key FROM {${opts.tableName}} WHERE key LIKE ${base + "%"}`;
      return rows?.map((r) => r.key);
    },
    clear: async () => {
      await ensureTable();
      await opts.database.sql`DELETE FROM {${opts.tableName}}`;
    }
  };
});
async function setupTable(opts) {
  switch (opts.database.dialect) {
    case "sqlite":
    case "libsql": {
      await opts.database.sql`
        CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
          key TEXT PRIMARY KEY,
          value TEXT,
          blob BLOB,
          created_at TEXT DEFAULT CURRENT_TIMESTAMP,
          updated_at TEXT DEFAULT CURRENT_TIMESTAMP
        );
      `;
      return;
    }
    case "postgresql": {
      await opts.database.sql`
        CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
          key VARCHAR(255) NOT NULL PRIMARY KEY,
          value TEXT,
          blob BYTEA,
          created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
          updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
      `;
      return;
    }
    case "mysql": {
      await opts.database.sql`
        CREATE TABLE IF NOT EXISTS {${opts.tableName}} (
          \`key\` VARCHAR(255) NOT NULL PRIMARY KEY,
          \`value\` LONGTEXT,
          \`blob\` BLOB,
          created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
          updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
        );
      `;
      return;
    }
    default: {
      throw createError(
        DRIVER_NAME,
        `unsupported SQL dialect: ${opts.database.dialect}`
      );
    }
  }
}
function toDate(timestamp) {
  return timestamp ? new Date(timestamp) : void 0;
}
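Usage note (not part of the vendored file): the driver is marked experimental and lazily creates its table on first use via setupTable() for the active dialect. A minimal sketch with db0's better-sqlite3 connector; connector options are omitted here because they are connector-specific, so treat the setup as an assumption.

// usage-sketch.mjs (illustrative only; connector choice and defaults are assumptions)
import { createDatabase } from "db0";
import sqlite from "db0/connectors/better-sqlite3";
import { createStorage } from "unstorage";
import db0Driver from "unstorage/drivers/db0";

const database = createDatabase(sqlite({}));
const storage = createStorage({
  driver: db0Driver({ database, tableName: "unstorage" })
});

// the first call triggers CREATE TABLE IF NOT EXISTS for the sqlite dialect
await storage.setItem("config:theme", "dark");
console.log(await storage.getMeta("config:theme")); // { birthtime, mtime }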
24
node_modules/unstorage/drivers/deno-kv-node.cjs
generated
vendored
Normal file
@@ -0,0 +1,24 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _kv = require("@deno/kv");
var _index = require("./utils/index.cjs");
var _denoKv = _interopRequireDefault(require("./deno-kv.cjs"));
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
const DRIVER_NAME = "deno-kv-node";
module.exports = (0, _index.defineDriver)((opts = {}) => {
  const baseDriver = (0, _denoKv.default)({
    ...opts,
    openKv: () => (0, _kv.openKv)(opts.path, opts.openKvOptions)
  });
  return {
    ...baseDriver,
    getInstance() {
      return baseDriver.getInstance();
    },
    name: DRIVER_NAME
  };
});
8
node_modules/unstorage/drivers/deno-kv-node.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import { openKv, type Kv } from "@deno/kv";
export interface DenoKvNodeOptions {
  base?: string;
  path?: string;
  openKvOptions?: Parameters<typeof openKv>[1];
}
declare const _default: (opts: DenoKvNodeOptions) => import("..").Driver<DenoKvNodeOptions, Kv | Promise<Kv>>;
export default _default;
19
node_modules/unstorage/drivers/deno-kv-node.mjs
generated
vendored
Normal file
@@ -0,0 +1,19 @@
import { openKv } from "@deno/kv";
import { defineDriver } from "./utils/index.mjs";
import denoKV from "./deno-kv.mjs";
const DRIVER_NAME = "deno-kv-node";
export default defineDriver(
  (opts = {}) => {
    const baseDriver = denoKV({
      ...opts,
      openKv: () => openKv(opts.path, opts.openKvOptions)
    });
    return {
      ...baseDriver,
      getInstance() {
        return baseDriver.getInstance();
      },
      name: DRIVER_NAME
    };
  }
);
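Usage note (not part of the vendored file): this variant wraps the base deno-kv driver with @deno/kv so it runs under Node.js. A minimal sketch; the file path is an assumption, and path/openKvOptions are forwarded straight to openKv() as the code above shows.

// usage-sketch.mjs (illustrative only; the path value is an assumption)
import { createStorage } from "unstorage";
import denoKVNodeDriver from "unstorage/drivers/deno-kv-node";

const storage = createStorage({
  driver: denoKVNodeDriver({ path: "./kv.db", base: "cache" })
});

await storage.setItem("users:1", "ada");
console.log(await storage.getKeys()); // keys come back ":"-joined with base stripped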
90
node_modules/unstorage/drivers/deno-kv.cjs
generated
vendored
Normal file
@@ -0,0 +1,90 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _index = require("./utils/index.cjs");
const DRIVER_NAME = "deno-kv";
module.exports = (0, _index.defineDriver)((opts = {}) => {
  const basePrefix = opts.base ? (0, _index.normalizeKey)(opts.base).split(":") : [];
  const r = (key = "") => [...basePrefix, ...key.split(":")].filter(Boolean);
  let _kv;
  const getKv = () => {
    if (_kv) {
      return _kv;
    }
    if (opts.openKv) {
      _kv = opts.openKv();
    } else {
      if (!globalThis.Deno) {
        throw (0, _index.createError)(DRIVER_NAME, "Missing global `Deno`. Are you running in Deno? (hint: use `deno-kv-node` driver for Node.js)");
      }
      if (!Deno.openKv) {
        throw (0, _index.createError)(DRIVER_NAME, "Missing `Deno.openKv`. Are you running Deno with --unstable-kv?");
      }
      _kv = Deno.openKv(opts.path);
    }
    return _kv;
  };
  return {
    name: DRIVER_NAME,
    getInstance() {
      return getKv();
    },
    async hasItem(key) {
      const kv = await getKv();
      const value = await kv.get(r(key));
      return !!value.value;
    },
    async getItem(key) {
      const kv = await getKv();
      const value = await kv.get(r(key));
      return value.value;
    },
    async getItemRaw(key) {
      const kv = await getKv();
      const value = await kv.get(r(key));
      return value.value;
    },
    async setItem(key, value) {
      const kv = await getKv();
      await kv.set(r(key), value);
    },
    async setItemRaw(key, value) {
      const kv = await getKv();
      await kv.set(r(key), value);
    },
    async removeItem(key) {
      const kv = await getKv();
      await kv.delete(r(key));
    },
    async getKeys(base) {
      const kv = await getKv();
      const keys = [];
      for await (const entry of kv.list({
        prefix: r(base)
      })) {
        keys.push((basePrefix.length > 0 ? entry.key.slice(basePrefix.length) : entry.key).join(":"));
      }
      return keys;
    },
    async clear(base) {
      const kv = await getKv();
      const batch = kv.atomic();
      for await (const entry of kv.list({
        prefix: r(base)
      })) {
        batch.delete(entry.key);
      }
      await batch.commit();
    },
    async dispose() {
      if (_kv) {
        const kv = await _kv;
        await kv.close();
        _kv = void 0;
      }
    }
  };
});
8
node_modules/unstorage/drivers/deno-kv.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import type { Kv } from "@deno/kv";
export interface DenoKvOptions {
  base?: string;
  path?: string;
  openKv?: () => Promise<Deno.Kv | Kv>;
}
declare const _default: (opts: DenoKvOptions) => import("..").Driver<DenoKvOptions, Promise<Deno.Kv | Kv>>;
export default _default;
90
node_modules/unstorage/drivers/deno-kv.mjs
generated
vendored
Normal file
@@ -0,0 +1,90 @@
import { defineDriver, createError, normalizeKey } from "./utils/index.mjs";
const DRIVER_NAME = "deno-kv";
export default defineDriver(
  (opts = {}) => {
    const basePrefix = opts.base ? normalizeKey(opts.base).split(":") : [];
    const r = (key = "") => [...basePrefix, ...key.split(":")].filter(Boolean);
    let _kv;
    const getKv = () => {
      if (_kv) {
        return _kv;
      }
      if (opts.openKv) {
        _kv = opts.openKv();
      } else {
        if (!globalThis.Deno) {
          throw createError(
            DRIVER_NAME,
            "Missing global `Deno`. Are you running in Deno? (hint: use `deno-kv-node` driver for Node.js)"
          );
        }
        if (!Deno.openKv) {
          throw createError(
            DRIVER_NAME,
            "Missing `Deno.openKv`. Are you running Deno with --unstable-kv?"
          );
        }
        _kv = Deno.openKv(opts.path);
      }
      return _kv;
    };
    return {
      name: DRIVER_NAME,
      getInstance() {
        return getKv();
      },
      async hasItem(key) {
        const kv = await getKv();
        const value = await kv.get(r(key));
        return !!value.value;
      },
      async getItem(key) {
        const kv = await getKv();
        const value = await kv.get(r(key));
        return value.value;
      },
      async getItemRaw(key) {
        const kv = await getKv();
        const value = await kv.get(r(key));
        return value.value;
      },
      async setItem(key, value) {
        const kv = await getKv();
        await kv.set(r(key), value);
      },
      async setItemRaw(key, value) {
        const kv = await getKv();
        await kv.set(r(key), value);
      },
      async removeItem(key) {
        const kv = await getKv();
        await kv.delete(r(key));
      },
      async getKeys(base) {
        const kv = await getKv();
        const keys = [];
        for await (const entry of kv.list({ prefix: r(base) })) {
          keys.push(
            (basePrefix.length > 0 ? entry.key.slice(basePrefix.length) : entry.key).join(":")
          );
        }
        return keys;
      },
      async clear(base) {
        const kv = await getKv();
        const batch = kv.atomic();
        for await (const entry of kv.list({ prefix: r(base) })) {
          batch.delete(entry.key);
        }
        await batch.commit();
      },
      async dispose() {
        if (_kv) {
          const kv = await _kv;
          await kv.close();
          _kv = void 0;
        }
      }
    };
  }
);
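Usage note (not part of the vendored file): in Deno itself the driver falls back to Deno.openKv, which requires the --unstable-kv flag as the error message above states. A minimal sketch; the base prefix is an assumption.

// usage-sketch.mjs (illustrative only; run with: deno run --unstable-kv usage-sketch.mjs)
import { createStorage } from "unstorage";
import denoKVDriver from "unstorage/drivers/deno-kv";

const storage = createStorage({
  driver: denoKVDriver({ base: "app" }) // keys become ["app", ...segments] Deno KV keys
});

await storage.setItem("counters:visits", "42");
console.log(await storage.getItem("counters:visits")); // "42"
await storage.dispose(); // closes the underlying Deno.Kv handle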
84
node_modules/unstorage/drivers/fs-lite.cjs
generated
vendored
Normal file
@@ -0,0 +1,84 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _nodeFs = require("node:fs");
var _nodePath = require("node:path");
var _utils = require("./utils/index.cjs");
var _nodeFs2 = require("./utils/node-fs.cjs");
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
const DRIVER_NAME = "fs-lite";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  if (!opts.base) {
    throw (0, _utils.createRequiredError)(DRIVER_NAME, "base");
  }
  opts.base = (0, _nodePath.resolve)(opts.base);
  const r = key => {
    if (PATH_TRAVERSE_RE.test(key)) {
      throw (0, _utils.createError)(DRIVER_NAME, `Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`);
    }
    const resolved = (0, _nodePath.join)(opts.base, key.replace(/:/g, "/"));
    return resolved;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    flags: {
      maxDepth: true
    },
    hasItem(key) {
      return (0, _nodeFs.existsSync)(r(key));
    },
    getItem(key) {
      return (0, _nodeFs2.readFile)(r(key), "utf8");
    },
    getItemRaw(key) {
      return (0, _nodeFs2.readFile)(r(key));
    },
    async getMeta(key) {
      const {
        atime,
        mtime,
        size,
        birthtime,
        ctime
      } = await _nodeFs.promises.stat(r(key)).catch(() => ({}));
      return {
        atime,
        mtime,
        size,
        birthtime,
        ctime
      };
    },
    setItem(key, value) {
      if (opts.readOnly) {
        return;
      }
      return (0, _nodeFs2.writeFile)(r(key), value, "utf8");
    },
    setItemRaw(key, value) {
      if (opts.readOnly) {
        return;
      }
      return (0, _nodeFs2.writeFile)(r(key), value);
    },
    removeItem(key) {
      if (opts.readOnly) {
        return;
      }
      return (0, _nodeFs2.unlink)(r(key));
    },
    getKeys(_base, topts) {
      return (0, _nodeFs2.readdirRecursive)(r("."), opts.ignore, topts?.maxDepth);
    },
    async clear() {
      if (opts.readOnly || opts.noClear) {
        return;
      }
      await (0, _nodeFs2.rmRecursive)(r("."));
    }
  };
});
8
node_modules/unstorage/drivers/fs-lite.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
export interface FSStorageOptions {
  base?: string;
  ignore?: (path: string) => boolean;
  readOnly?: boolean;
  noClear?: boolean;
}
declare const _default: (opts: FSStorageOptions | undefined) => import("..").Driver<FSStorageOptions | undefined, never>;
export default _default;
75
node_modules/unstorage/drivers/fs-lite.mjs
generated
vendored
Normal file
@@ -0,0 +1,75 @@
import { existsSync, promises as fsp } from "node:fs";
import { resolve, join } from "node:path";
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
import {
  readFile,
  writeFile,
  readdirRecursive,
  rmRecursive,
  unlink
} from "./utils/node-fs.mjs";
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
const DRIVER_NAME = "fs-lite";
export default defineDriver((opts = {}) => {
  if (!opts.base) {
    throw createRequiredError(DRIVER_NAME, "base");
  }
  opts.base = resolve(opts.base);
  const r = (key) => {
    if (PATH_TRAVERSE_RE.test(key)) {
      throw createError(
        DRIVER_NAME,
        `Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`
      );
    }
    const resolved = join(opts.base, key.replace(/:/g, "/"));
    return resolved;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    flags: {
      maxDepth: true
    },
    hasItem(key) {
      return existsSync(r(key));
    },
    getItem(key) {
      return readFile(r(key), "utf8");
    },
    getItemRaw(key) {
      return readFile(r(key));
    },
    async getMeta(key) {
      const { atime, mtime, size, birthtime, ctime } = await fsp.stat(r(key)).catch(() => ({}));
      return { atime, mtime, size, birthtime, ctime };
    },
    setItem(key, value) {
      if (opts.readOnly) {
        return;
      }
      return writeFile(r(key), value, "utf8");
    },
    setItemRaw(key, value) {
      if (opts.readOnly) {
        return;
      }
      return writeFile(r(key), value);
    },
    removeItem(key) {
      if (opts.readOnly) {
        return;
      }
      return unlink(r(key));
    },
    getKeys(_base, topts) {
      return readdirRecursive(r("."), opts.ignore, topts?.maxDepth);
    },
    async clear() {
      if (opts.readOnly || opts.noClear) {
        return;
      }
      await rmRecursive(r("."));
    }
  };
});
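Usage note (not part of the vendored file): fs-lite maps each key to a path under base, replacing ":" with "/", and rejects keys containing ".." segments. A minimal sketch; the "./data" directory is an assumption.

// usage-sketch.mjs (illustrative only; "./data" is an assumed directory)
import { createStorage } from "unstorage";
import fsLiteDriver from "unstorage/drivers/fs-lite";

const storage = createStorage({
  driver: fsLiteDriver({ base: "./data", noClear: true })
});

// key "posts:hello.md" maps to ./data/posts/hello.md (":" becomes "/")
await storage.setItem("posts:hello.md", "# Hello");
console.log(await storage.getKeys());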
130
node_modules/unstorage/drivers/fs.cjs
generated
vendored
Normal file
@@ -0,0 +1,130 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _nodeFs = require("node:fs");
var _nodePath = require("node:path");
var _chokidar = require("chokidar");
var _anymatch = _interopRequireDefault(require("anymatch"));
var _utils = require("./utils/index.cjs");
var _nodeFs2 = require("./utils/node-fs.cjs");
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
const DRIVER_NAME = "fs";
module.exports = (0, _utils.defineDriver)((userOptions = {}) => {
  if (!userOptions.base) {
    throw (0, _utils.createRequiredError)(DRIVER_NAME, "base");
  }
  const base = (0, _nodePath.resolve)(userOptions.base);
  const ignore = (0, _anymatch.default)(userOptions.ignore || ["**/node_modules/**", "**/.git/**"]);
  const r = key => {
    if (PATH_TRAVERSE_RE.test(key)) {
      throw (0, _utils.createError)(DRIVER_NAME, `Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`);
    }
    const resolved = (0, _nodePath.join)(base, key.replace(/:/g, "/"));
    return resolved;
  };
  let _watcher;
  const _unwatch = async () => {
    if (_watcher) {
      await _watcher.close();
      _watcher = void 0;
    }
  };
  return {
    name: DRIVER_NAME,
    options: userOptions,
    flags: {
      maxDepth: true
    },
    hasItem(key) {
      return (0, _nodeFs.existsSync)(r(key));
    },
    getItem(key) {
      return (0, _nodeFs2.readFile)(r(key), "utf8");
    },
    getItemRaw(key) {
      return (0, _nodeFs2.readFile)(r(key));
    },
    async getMeta(key) {
      const {
        atime,
        mtime,
        size,
        birthtime,
        ctime
      } = await _nodeFs.promises.stat(r(key)).catch(() => ({}));
      return {
        atime,
        mtime,
        size,
        birthtime,
        ctime
      };
    },
    setItem(key, value) {
      if (userOptions.readOnly) {
        return;
      }
      return (0, _nodeFs2.writeFile)(r(key), value, "utf8");
    },
    setItemRaw(key, value) {
      if (userOptions.readOnly) {
        return;
      }
      return (0, _nodeFs2.writeFile)(r(key), value);
    },
    removeItem(key) {
      if (userOptions.readOnly) {
        return;
      }
      return (0, _nodeFs2.unlink)(r(key));
    },
    getKeys(_base, topts) {
      return (0, _nodeFs2.readdirRecursive)(r("."), ignore, topts?.maxDepth);
    },
    async clear() {
      if (userOptions.readOnly || userOptions.noClear) {
        return;
      }
      await (0, _nodeFs2.rmRecursive)(r("."));
    },
    async dispose() {
      if (_watcher) {
        await _watcher.close();
      }
    },
    async watch(callback) {
      if (_watcher) {
        return _unwatch;
      }
      await new Promise((resolve2, reject) => {
        const watchOptions = {
          ignoreInitial: true,
          ...userOptions.watchOptions
        };
        if (!watchOptions.ignored) {
          watchOptions.ignored = [];
        } else if (Array.isArray(watchOptions.ignored)) {
          watchOptions.ignored = [...watchOptions.ignored];
        } else {
          watchOptions.ignored = [watchOptions.ignored];
        }
        watchOptions.ignored.push(ignore);
        _watcher = (0, _chokidar.watch)(base, watchOptions).on("ready", () => {
          resolve2();
        }).on("error", reject).on("all", (eventName, path) => {
          path = (0, _nodePath.relative)(base, path);
          if (eventName === "change" || eventName === "add") {
            callback("update", path);
          } else if (eventName === "unlink") {
            callback("remove", path);
          }
        });
      });
      return _unwatch;
    }
  };
});
10
node_modules/unstorage/drivers/fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
import { type ChokidarOptions } from "chokidar";
export interface FSStorageOptions {
  base?: string;
  ignore?: string[];
  readOnly?: boolean;
  noClear?: boolean;
  watchOptions?: ChokidarOptions;
}
declare const _default: (opts: FSStorageOptions | undefined) => import("..").Driver<FSStorageOptions | undefined, never>;
export default _default;
122
node_modules/unstorage/drivers/fs.mjs
generated
vendored
Normal file
@@ -0,0 +1,122 @@
import { existsSync, promises as fsp } from "node:fs";
import { resolve, relative, join } from "node:path";
import { watch } from "chokidar";
import anymatch from "anymatch";
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
import {
  readFile,
  writeFile,
  readdirRecursive,
  rmRecursive,
  unlink
} from "./utils/node-fs.mjs";
const PATH_TRAVERSE_RE = /\.\.:|\.\.$/;
const DRIVER_NAME = "fs";
export default defineDriver((userOptions = {}) => {
  if (!userOptions.base) {
    throw createRequiredError(DRIVER_NAME, "base");
  }
  const base = resolve(userOptions.base);
  const ignore = anymatch(
    userOptions.ignore || ["**/node_modules/**", "**/.git/**"]
  );
  const r = (key) => {
    if (PATH_TRAVERSE_RE.test(key)) {
      throw createError(
        DRIVER_NAME,
        `Invalid key: ${JSON.stringify(key)}. It should not contain .. segments`
      );
    }
    const resolved = join(base, key.replace(/:/g, "/"));
    return resolved;
  };
  let _watcher;
  const _unwatch = async () => {
    if (_watcher) {
      await _watcher.close();
      _watcher = void 0;
    }
  };
  return {
    name: DRIVER_NAME,
    options: userOptions,
    flags: {
      maxDepth: true
    },
    hasItem(key) {
      return existsSync(r(key));
    },
    getItem(key) {
      return readFile(r(key), "utf8");
    },
    getItemRaw(key) {
      return readFile(r(key));
    },
    async getMeta(key) {
      const { atime, mtime, size, birthtime, ctime } = await fsp.stat(r(key)).catch(() => ({}));
      return { atime, mtime, size, birthtime, ctime };
    },
    setItem(key, value) {
      if (userOptions.readOnly) {
        return;
      }
      return writeFile(r(key), value, "utf8");
    },
    setItemRaw(key, value) {
      if (userOptions.readOnly) {
        return;
      }
      return writeFile(r(key), value);
    },
    removeItem(key) {
      if (userOptions.readOnly) {
        return;
      }
      return unlink(r(key));
    },
    getKeys(_base, topts) {
      return readdirRecursive(r("."), ignore, topts?.maxDepth);
    },
    async clear() {
      if (userOptions.readOnly || userOptions.noClear) {
        return;
      }
      await rmRecursive(r("."));
    },
    async dispose() {
      if (_watcher) {
        await _watcher.close();
      }
    },
    async watch(callback) {
      if (_watcher) {
        return _unwatch;
      }
      await new Promise((resolve2, reject) => {
        const watchOptions = {
          ignoreInitial: true,
          ...userOptions.watchOptions
        };
        if (!watchOptions.ignored) {
          watchOptions.ignored = [];
        } else if (Array.isArray(watchOptions.ignored)) {
          watchOptions.ignored = [...watchOptions.ignored];
        } else {
          watchOptions.ignored = [watchOptions.ignored];
        }
        watchOptions.ignored.push(ignore);
        _watcher = watch(base, watchOptions).on("ready", () => {
          resolve2();
        }).on("error", reject).on("all", (eventName, path) => {
          path = relative(base, path);
          if (eventName === "change" || eventName === "add") {
            callback("update", path);
          } else if (eventName === "unlink") {
            callback("remove", path);
          }
        });
      });
      return _unwatch;
    }
  };
});
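Usage note (not part of the vendored file): unlike fs-lite, the full fs driver adds chokidar-backed watching and anymatch glob ignores. A minimal sketch; the "./content" directory and glob pattern are assumptions.

// usage-sketch.mjs (illustrative only; "./content" is an assumed directory)
import { createStorage } from "unstorage";
import fsDriver from "unstorage/drivers/fs";

const storage = createStorage({
  driver: fsDriver({ base: "./content", ignore: ["**/*.tmp"] })
});

// watch() is backed by chokidar; the callback receives "update" | "remove"
const unwatch = await storage.watch((event, key) => {
  console.log(event, key);
});
// later: await unwatch();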
114
node_modules/unstorage/drivers/github.cjs
generated
vendored
Normal file
@@ -0,0 +1,114 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _ofetch = require("ofetch");
var _ufo = require("ufo");
const defaultOptions = {
  repo: "",
  branch: "main",
  ttl: 600,
  dir: "",
  apiURL: "https://api.github.com",
  cdnURL: "https://raw.githubusercontent.com"
};
const DRIVER_NAME = "github";
module.exports = (0, _utils.defineDriver)(_opts => {
  const opts = {
    ...defaultOptions,
    ..._opts
  };
  const rawUrl = (0, _ufo.joinURL)(opts.cdnURL, opts.repo, opts.branch, opts.dir);
  let files = {};
  let lastCheck = 0;
  let syncPromise;
  const syncFiles = async () => {
    if (!opts.repo) {
      throw (0, _utils.createRequiredError)(DRIVER_NAME, "repo");
    }
    if (lastCheck + opts.ttl * 1e3 > Date.now()) {
      return;
    }
    if (!syncPromise) {
      syncPromise = fetchFiles(opts);
    }
    files = await syncPromise;
    lastCheck = Date.now();
    syncPromise = void 0;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    async getKeys() {
      await syncFiles();
      return Object.keys(files);
    },
    async hasItem(key) {
      await syncFiles();
      return key in files;
    },
    async getItem(key) {
      await syncFiles();
      const item = files[key];
      if (!item) {
        return null;
      }
      if (!item.body) {
        try {
          item.body = await (0, _ofetch.$fetch)(key.replace(/:/g, "/"), {
            baseURL: rawUrl,
            headers: opts.token ? {
              Authorization: `token ${opts.token}`
            } : void 0
          });
        } catch (error) {
          throw (0, _utils.createError)("github", `Failed to fetch \`${JSON.stringify(key)}\``, {
            cause: error
          });
        }
      }
      return item.body;
    },
    async getMeta(key) {
      await syncFiles();
      const item = files[key];
      return item ? item.meta : null;
    }
  };
});
async function fetchFiles(opts) {
  const prefix = (0, _ufo.withTrailingSlash)(opts.dir).replace(/^\//, "");
  const files = {};
  try {
    const trees = await (0, _ofetch.$fetch)(`/repos/${opts.repo}/git/trees/${opts.branch}?recursive=1`, {
      baseURL: opts.apiURL,
      headers: {
        "User-Agent": "unstorage",
        ...(opts.token && {
          Authorization: `token ${opts.token}`
        })
      }
    });
    for (const node of trees.tree) {
      if (node.type !== "blob" || !node.path.startsWith(prefix)) {
        continue;
      }
      const key = node.path.slice(prefix.length).replace(/\//g, ":");
      files[key] = {
        meta: {
          sha: node.sha,
          mode: node.mode,
          size: node.size
        }
      };
    }
    return files;
  } catch (error) {
    throw (0, _utils.createError)(DRIVER_NAME, "Failed to fetch git tree", {
      cause: error
    });
  }
}
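Usage note (not part of the vendored file): this is a read-only driver; it lists files by fetching the git tree (cached for ttl seconds) and lazily downloads file bodies from the raw CDN on first getItem. A minimal sketch; the repo value is an example, and a token is recommended per the d.ts to avoid API rate limits.

// usage-sketch.mjs (illustrative only; repo value is an example)
import { createStorage } from "unstorage";
import githubDriver from "unstorage/drivers/github";

const storage = createStorage({
  driver: githubDriver({
    repo: "unjs/unstorage", // "owner/name"
    branch: "main",
    ttl: 600 // re-fetch the git tree at most every 10 minutes
  })
});

// nested paths are exposed with ":" separators instead of "/"
console.log(await storage.getItem("README.md"));
console.log(await storage.getMeta("README.md")); // { sha, mode, size }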
34
node_modules/unstorage/drivers/github.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
export interface GithubOptions {
  /**
   * The name of the repository. (e.g. `username/my-repo`)
   * Required
   */
  repo: string;
  /**
   * The branch to fetch. (e.g. `dev`)
   * @default "main"
   */
  branch?: string;
  /**
   * @default ""
   */
  dir?: string;
  /**
   * @default 600
   */
  ttl?: number;
  /**
   * Github API token (recommended)
   */
  token?: string;
  /**
   * @default "https://api.github.com"
   */
  apiURL?: string;
  /**
   * @default "https://raw.githubusercontent.com"
   */
  cdnURL?: string;
}
declare const _default: (opts: GithubOptions) => import("..").Driver<GithubOptions, never>;
export default _default;
108
node_modules/unstorage/drivers/github.mjs
generated
vendored
Normal file
@@ -0,0 +1,108 @@
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
import { $fetch } from "ofetch";
import { withTrailingSlash, joinURL } from "ufo";
const defaultOptions = {
  repo: "",
  branch: "main",
  ttl: 600,
  dir: "",
  apiURL: "https://api.github.com",
  cdnURL: "https://raw.githubusercontent.com"
};
const DRIVER_NAME = "github";
export default defineDriver((_opts) => {
  const opts = { ...defaultOptions, ..._opts };
  const rawUrl = joinURL(opts.cdnURL, opts.repo, opts.branch, opts.dir);
  let files = {};
  let lastCheck = 0;
  let syncPromise;
  const syncFiles = async () => {
    if (!opts.repo) {
      throw createRequiredError(DRIVER_NAME, "repo");
    }
    if (lastCheck + opts.ttl * 1e3 > Date.now()) {
      return;
    }
    if (!syncPromise) {
      syncPromise = fetchFiles(opts);
    }
    files = await syncPromise;
    lastCheck = Date.now();
    syncPromise = void 0;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    async getKeys() {
      await syncFiles();
      return Object.keys(files);
    },
    async hasItem(key) {
      await syncFiles();
      return key in files;
    },
    async getItem(key) {
      await syncFiles();
      const item = files[key];
      if (!item) {
        return null;
      }
      if (!item.body) {
        try {
          item.body = await $fetch(key.replace(/:/g, "/"), {
            baseURL: rawUrl,
            headers: opts.token ? {
              Authorization: `token ${opts.token}`
            } : void 0
          });
        } catch (error) {
          throw createError(
            "github",
            `Failed to fetch \`${JSON.stringify(key)}\``,
            { cause: error }
          );
        }
      }
      return item.body;
    },
    async getMeta(key) {
      await syncFiles();
      const item = files[key];
      return item ? item.meta : null;
    }
  };
});
async function fetchFiles(opts) {
  const prefix = withTrailingSlash(opts.dir).replace(/^\//, "");
  const files = {};
  try {
    const trees = await $fetch(
      `/repos/${opts.repo}/git/trees/${opts.branch}?recursive=1`,
      {
        baseURL: opts.apiURL,
        headers: {
          "User-Agent": "unstorage",
          ...opts.token && { Authorization: `token ${opts.token}` }
        }
      }
    );
    for (const node of trees.tree) {
      if (node.type !== "blob" || !node.path.startsWith(prefix)) {
        continue;
      }
      const key = node.path.slice(prefix.length).replace(/\//g, ":");
      files[key] = {
        meta: {
          sha: node.sha,
          mode: node.mode,
          size: node.size
        }
      };
    }
    return files;
  } catch (error) {
    throw createError(DRIVER_NAME, "Failed to fetch git tree", {
      cause: error
    });
  }
}
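A minimal consumption sketch (not part of the vendored file; assumes unstorage's documented createStorage entry point, with placeholder repo/dir values). The driver is read-only: keys mirror paths under `dir` with "/" replaced by ":", and the tree listing is cached for `ttl` seconds.

  import { createStorage } from "unstorage";
  import githubDriver from "unstorage/drivers/github";

  const storage = createStorage({
    driver: githubDriver({ repo: "unjs/unstorage", branch: "main", dir: "docs" }),
  });
  const keys = await storage.getKeys(); // e.g. "guide:index.md"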
111
node_modules/unstorage/drivers/http.cjs
generated
vendored
Normal file
@@ -0,0 +1,111 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _ofetch = require("ofetch");
var _ufo = require("ufo");
const DRIVER_NAME = "http";
module.exports = (0, _utils.defineDriver)(opts => {
  const r = (key = "") => (0, _ufo.joinURL)(opts.base, key.replace(/:/g, "/"));
  const rBase = (key = "") => (0, _ufo.joinURL)(opts.base, (key || "/").replace(/:/g, "/"), ":");
  const catchFetchError = (error, fallbackVal = null) => {
    if (error?.response?.status === 404) {
      return fallbackVal;
    }
    throw error;
  };
  const getHeaders = (topts, defaultHeaders) => {
    const headers = {
      ...defaultHeaders,
      ...opts.headers,
      ...topts?.headers
    };
    if (topts?.ttl && !headers["x-ttl"]) {
      headers["x-ttl"] = topts.ttl + "";
    }
    return headers;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    hasItem(key, topts) {
      return (0, _ofetch.$fetch)(r(key), {
        method: "HEAD",
        headers: getHeaders(topts)
      }).then(() => true).catch(err => catchFetchError(err, false));
    },
    async getItem(key, tops) {
      const value = await (0, _ofetch.$fetch)(r(key), {
        headers: getHeaders(tops)
      }).catch(catchFetchError);
      return value;
    },
    async getItemRaw(key, topts) {
      const response = await _ofetch.$fetch.raw(r(key), {
        responseType: "arrayBuffer",
        headers: getHeaders(topts, {
          accept: "application/octet-stream"
        })
      }).catch(catchFetchError);
      return response._data;
    },
    async getMeta(key, topts) {
      const res = await _ofetch.$fetch.raw(r(key), {
        method: "HEAD",
        headers: getHeaders(topts)
      });
      let mtime = void 0;
      let ttl = void 0;
      const _lastModified = res.headers.get("last-modified");
      if (_lastModified) {
        mtime = new Date(_lastModified);
      }
      const _ttl = res.headers.get("x-ttl");
      if (_ttl) {
        ttl = Number.parseInt(_ttl, 10);
      }
      return {
        status: res.status,
        mtime,
        ttl
      };
    },
    async setItem(key, value, topts) {
      await (0, _ofetch.$fetch)(r(key), {
        method: "PUT",
        body: value,
        headers: getHeaders(topts)
      });
    },
    async setItemRaw(key, value, topts) {
      await (0, _ofetch.$fetch)(r(key), {
        method: "PUT",
        body: value,
        headers: getHeaders(topts, {
          "content-type": "application/octet-stream"
        })
      });
    },
    async removeItem(key, topts) {
      await (0, _ofetch.$fetch)(r(key), {
        method: "DELETE",
        headers: getHeaders(topts)
      });
    },
    async getKeys(base, topts) {
      const value = await (0, _ofetch.$fetch)(rBase(base), {
        headers: getHeaders(topts)
      });
      return Array.isArray(value) ? value : [];
    },
    async clear(base, topts) {
      await (0, _ofetch.$fetch)(rBase(base), {
        method: "DELETE",
        headers: getHeaders(topts)
      });
    }
  };
});
6
node_modules/unstorage/drivers/http.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
export interface HTTPOptions {
  base: string;
  headers?: Record<string, string>;
}
declare const _default: (opts: HTTPOptions) => import("..").Driver<HTTPOptions, never>;
export default _default;
103
node_modules/unstorage/drivers/http.mjs
generated
vendored
Normal file
@@ -0,0 +1,103 @@
import { defineDriver } from "./utils/index.mjs";
import { $fetch as _fetch } from "ofetch";
import { joinURL } from "ufo";
const DRIVER_NAME = "http";
export default defineDriver((opts) => {
  const r = (key = "") => joinURL(opts.base, key.replace(/:/g, "/"));
  const rBase = (key = "") => joinURL(opts.base, (key || "/").replace(/:/g, "/"), ":");
  const catchFetchError = (error, fallbackVal = null) => {
    if (error?.response?.status === 404) {
      return fallbackVal;
    }
    throw error;
  };
  const getHeaders = (topts, defaultHeaders) => {
    const headers = {
      ...defaultHeaders,
      ...opts.headers,
      ...topts?.headers
    };
    if (topts?.ttl && !headers["x-ttl"]) {
      headers["x-ttl"] = topts.ttl + "";
    }
    return headers;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    hasItem(key, topts) {
      return _fetch(r(key), {
        method: "HEAD",
        headers: getHeaders(topts)
      }).then(() => true).catch((err) => catchFetchError(err, false));
    },
    async getItem(key, tops) {
      const value = await _fetch(r(key), {
        headers: getHeaders(tops)
      }).catch(catchFetchError);
      return value;
    },
    async getItemRaw(key, topts) {
      const response = await _fetch.raw(r(key), {
        responseType: "arrayBuffer",
        headers: getHeaders(topts, { accept: "application/octet-stream" })
      }).catch(catchFetchError);
      return response._data;
    },
    async getMeta(key, topts) {
      const res = await _fetch.raw(r(key), {
        method: "HEAD",
        headers: getHeaders(topts)
      });
      let mtime = void 0;
      let ttl = void 0;
      const _lastModified = res.headers.get("last-modified");
      if (_lastModified) {
        mtime = new Date(_lastModified);
      }
      const _ttl = res.headers.get("x-ttl");
      if (_ttl) {
        ttl = Number.parseInt(_ttl, 10);
      }
      return {
        status: res.status,
        mtime,
        ttl
      };
    },
    async setItem(key, value, topts) {
      await _fetch(r(key), {
        method: "PUT",
        body: value,
        headers: getHeaders(topts)
      });
    },
    async setItemRaw(key, value, topts) {
      await _fetch(r(key), {
        method: "PUT",
        body: value,
        headers: getHeaders(topts, {
          "content-type": "application/octet-stream"
        })
      });
    },
    async removeItem(key, topts) {
      await _fetch(r(key), {
        method: "DELETE",
        headers: getHeaders(topts)
      });
    },
    async getKeys(base, topts) {
      const value = await _fetch(rBase(base), {
        headers: getHeaders(topts)
      });
      return Array.isArray(value) ? value : [];
    },
    async clear(base, topts) {
      await _fetch(rBase(base), {
        method: "DELETE",
        headers: getHeaders(topts)
      });
    }
  };
});
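For orientation (a sketch, not part of the vendored file; the base URL is a placeholder): the http driver maps storage calls onto plain REST verbs against `base`, turning ":" in keys into "/" path segments.

  import { createStorage } from "unstorage";
  import httpDriver from "unstorage/drivers/http";

  const storage = createStorage({
    driver: httpDriver({ base: "https://example.com/kv" }),
  });
  await storage.setItem("users:42", "hello"); // PUT https://example.com/kv/users/42
  await storage.getItem("users:42");          // GET https://example.com/kv/users/42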
48
node_modules/unstorage/drivers/indexedb.cjs
generated
vendored
Normal file
@@ -0,0 +1,48 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _idbKeyval = require("idb-keyval");
const DRIVER_NAME = "idb-keyval";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  const base = opts.base && opts.base.length > 0 ? `${opts.base}:` : "";
  const makeKey = key => base + key;
  let customStore;
  if (opts.dbName && opts.storeName) {
    customStore = (0, _idbKeyval.createStore)(opts.dbName, opts.storeName);
  }
  return {
    name: DRIVER_NAME,
    options: opts,
    async hasItem(key) {
      const item = await (0, _idbKeyval.get)(makeKey(key), customStore);
      return item === void 0 ? false : true;
    },
    async getItem(key) {
      const item = await (0, _idbKeyval.get)(makeKey(key), customStore);
      return item ?? null;
    },
    async getItemRaw(key) {
      const item = await (0, _idbKeyval.get)(makeKey(key), customStore);
      return item ?? null;
    },
    setItem(key, value) {
      return (0, _idbKeyval.set)(makeKey(key), value, customStore);
    },
    setItemRaw(key, value) {
      return (0, _idbKeyval.set)(makeKey(key), value, customStore);
    },
    removeItem(key) {
      return (0, _idbKeyval.del)(makeKey(key), customStore);
    },
    getKeys() {
      return (0, _idbKeyval.keys)(customStore);
    },
    clear() {
      return (0, _idbKeyval.clear)(customStore);
    }
  };
});
7
node_modules/unstorage/drivers/indexedb.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
export interface IDBKeyvalOptions {
  base?: string;
  dbName?: string;
  storeName?: string;
}
declare const _default: (opts: IDBKeyvalOptions | undefined) => import("..").Driver<IDBKeyvalOptions | undefined, never>;
export default _default;
49
node_modules/unstorage/drivers/indexedb.mjs
generated
vendored
Normal file
@@ -0,0 +1,49 @@
import { defineDriver } from "./utils/index.mjs";
import {
  get,
  set,
  clear,
  del,
  keys,
  createStore
} from "idb-keyval";
const DRIVER_NAME = "idb-keyval";
export default defineDriver((opts = {}) => {
  const base = opts.base && opts.base.length > 0 ? `${opts.base}:` : "";
  const makeKey = (key) => base + key;
  let customStore;
  if (opts.dbName && opts.storeName) {
    customStore = createStore(opts.dbName, opts.storeName);
  }
  return {
    name: DRIVER_NAME,
    options: opts,
    async hasItem(key) {
      const item = await get(makeKey(key), customStore);
      return item === void 0 ? false : true;
    },
    async getItem(key) {
      const item = await get(makeKey(key), customStore);
      return item ?? null;
    },
    async getItemRaw(key) {
      const item = await get(makeKey(key), customStore);
      return item ?? null;
    },
    setItem(key, value) {
      return set(makeKey(key), value, customStore);
    },
    setItemRaw(key, value) {
      return set(makeKey(key), value, customStore);
    },
    removeItem(key) {
      return del(makeKey(key), customStore);
    },
    getKeys() {
      return keys(customStore);
    },
    clear() {
      return clear(customStore);
    }
  };
});
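Browser-only usage sketch (not part of the vendored file; assumes createStorage, and the base/db/store names are placeholders). Values are persisted through idb-keyval, optionally in a custom database and object store when both dbName and storeName are given:

  import { createStorage } from "unstorage";
  import indexedDbDriver from "unstorage/drivers/indexedb";

  const storage = createStorage({
    // Keys are persisted as "app:<key>" inside IndexedDB.
    driver: indexedDbDriver({ base: "app", dbName: "my-db", storeName: "my-store" }),
  });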
73
node_modules/unstorage/drivers/localstorage.cjs
generated
vendored
Normal file
@@ -0,0 +1,73 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "localstorage";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  const storage = opts.storage || opts.localStorage || opts.sessionStorage || (opts.window || globalThis.window)?.[opts.windowKey || "localStorage"];
  if (!storage) {
    throw (0, _utils.createRequiredError)(DRIVER_NAME, "localStorage");
  }
  const base = opts.base ? (0, _utils.normalizeKey)(opts.base) : "";
  const r = key => (base ? `${base}:` : "") + key;
  let _storageListener;
  const _unwatch = () => {
    if (_storageListener) {
      opts.window?.removeEventListener("storage", _storageListener);
    }
    _storageListener = void 0;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => storage,
    hasItem(key) {
      return Object.prototype.hasOwnProperty.call(storage, r(key));
    },
    getItem(key) {
      return storage.getItem(r(key));
    },
    setItem(key, value) {
      return storage.setItem(r(key), value);
    },
    removeItem(key) {
      return storage.removeItem(r(key));
    },
    getKeys() {
      const allKeys = Object.keys(storage);
      return base ? allKeys.filter(key => key.startsWith(`${base}:`)).map(key => key.slice(base.length + 1)) : allKeys;
    },
    clear(prefix) {
      const _base = [base, prefix].filter(Boolean).join(":");
      if (_base) {
        for (const key of Object.keys(storage)) {
          if (key.startsWith(`${_base}:`)) {
            storage?.removeItem(key);
          }
        }
      } else {
        storage.clear();
      }
    },
    dispose() {
      if (opts.window && _storageListener) {
        opts.window.removeEventListener("storage", _storageListener);
      }
    },
    watch(callback) {
      if (!opts.window) {
        return _unwatch;
      }
      _storageListener = ev => {
        if (ev.key) {
          callback(ev.newValue ? "update" : "remove", ev.key);
        }
      };
      opts.window.addEventListener("storage", _storageListener);
      return _unwatch;
    }
  };
});
12
node_modules/unstorage/drivers/localstorage.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
export interface LocalStorageOptions {
  base?: string;
  window?: typeof window;
  windowKey?: "localStorage" | "sessionStorage";
  storage?: typeof window.localStorage | typeof window.sessionStorage;
  /** @deprecated use `storage` option */
  sessionStorage?: typeof window.sessionStorage;
  /** @deprecated use `storage` option */
  localStorage?: typeof window.localStorage;
}
declare const _default: (opts: LocalStorageOptions | undefined) => import("..").Driver<LocalStorageOptions | undefined, Storage>;
export default _default;
67
node_modules/unstorage/drivers/localstorage.mjs
generated
vendored
Normal file
@@ -0,0 +1,67 @@
import { createRequiredError, defineDriver, normalizeKey } from "./utils/index.mjs";
const DRIVER_NAME = "localstorage";
export default defineDriver((opts = {}) => {
  const storage = opts.storage || opts.localStorage || opts.sessionStorage || (opts.window || globalThis.window)?.[opts.windowKey || "localStorage"];
  if (!storage) {
    throw createRequiredError(DRIVER_NAME, "localStorage");
  }
  const base = opts.base ? normalizeKey(opts.base) : "";
  const r = (key) => (base ? `${base}:` : "") + key;
  let _storageListener;
  const _unwatch = () => {
    if (_storageListener) {
      opts.window?.removeEventListener("storage", _storageListener);
    }
    _storageListener = void 0;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => storage,
    hasItem(key) {
      return Object.prototype.hasOwnProperty.call(storage, r(key));
    },
    getItem(key) {
      return storage.getItem(r(key));
    },
    setItem(key, value) {
      return storage.setItem(r(key), value);
    },
    removeItem(key) {
      return storage.removeItem(r(key));
    },
    getKeys() {
      const allKeys = Object.keys(storage);
      return base ? allKeys.filter((key) => key.startsWith(`${base}:`)).map((key) => key.slice(base.length + 1)) : allKeys;
    },
    clear(prefix) {
      const _base = [base, prefix].filter(Boolean).join(":");
      if (_base) {
        for (const key of Object.keys(storage)) {
          if (key.startsWith(`${_base}:`)) {
            storage?.removeItem(key);
          }
        }
      } else {
        storage.clear();
      }
    },
    dispose() {
      if (opts.window && _storageListener) {
        opts.window.removeEventListener("storage", _storageListener);
      }
    },
    watch(callback) {
      if (!opts.window) {
        return _unwatch;
      }
      _storageListener = (ev) => {
        if (ev.key) {
          callback(ev.newValue ? "update" : "remove", ev.key);
        }
      };
      opts.window.addEventListener("storage", _storageListener);
      return _unwatch;
    }
  };
});
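Usage sketch (not part of the vendored file; assumes createStorage, and the "app" base is a placeholder). Passing `window` enables `watch` notifications via the browser storage event, which only fires for changes made in other tabs; `windowKey: "sessionStorage"` switches the backing store:

  import { createStorage } from "unstorage";
  import localStorageDriver from "unstorage/drivers/localstorage";

  const storage = createStorage({
    driver: localStorageDriver({ base: "app", window }),
  });
  const unwatch = await storage.watch((event, key) => console.log(event, key));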
61
node_modules/unstorage/drivers/lru-cache.cjs
generated
vendored
Normal file
@@ -0,0 +1,61 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _lruCache = require("lru-cache");
const DRIVER_NAME = "lru-cache";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  const cache = new _lruCache.LRUCache({
    max: 1e3,
    sizeCalculation: opts.maxSize || opts.maxEntrySize ? (value, key) => {
      return key.length + byteLength(value);
    } : void 0,
    ...opts
  });
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => cache,
    hasItem(key) {
      return cache.has(key);
    },
    getItem(key) {
      return cache.get(key) ?? null;
    },
    getItemRaw(key) {
      return cache.get(key) ?? null;
    },
    setItem(key, value) {
      cache.set(key, value);
    },
    setItemRaw(key, value) {
      cache.set(key, value);
    },
    removeItem(key) {
      cache.delete(key);
    },
    getKeys() {
      return [...cache.keys()];
    },
    clear() {
      cache.clear();
    },
    dispose() {
      cache.clear();
    }
  };
});
function byteLength(value) {
  if (typeof Buffer !== "undefined") {
    try {
      return Buffer.byteLength(value);
    } catch {}
  }
  try {
    return typeof value === "string" ? value.length : JSON.stringify(value).length;
  } catch {}
  return 0;
}
6
node_modules/unstorage/drivers/lru-cache.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import { LRUCache } from "lru-cache";
type LRUCacheOptions = LRUCache.OptionsBase<string, any, any> & Partial<LRUCache.OptionsMaxLimit<string, any, any>> & Partial<LRUCache.OptionsSizeLimit<string, any, any>> & Partial<LRUCache.OptionsTTLLimit<string, any, any>>;
export interface LRUDriverOptions extends LRUCacheOptions {
}
declare const _default: (opts: LRUDriverOptions | undefined) => import("..").Driver<LRUDriverOptions | undefined, LRUCache<string, any, any>>;
export default _default;
57
node_modules/unstorage/drivers/lru-cache.mjs
generated
vendored
Normal file
@@ -0,0 +1,57 @@
import { defineDriver } from "./utils/index.mjs";
import { LRUCache } from "lru-cache";
const DRIVER_NAME = "lru-cache";
export default defineDriver((opts = {}) => {
  const cache = new LRUCache({
    max: 1e3,
    sizeCalculation: opts.maxSize || opts.maxEntrySize ? (value, key) => {
      return key.length + byteLength(value);
    } : void 0,
    ...opts
  });
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: () => cache,
    hasItem(key) {
      return cache.has(key);
    },
    getItem(key) {
      return cache.get(key) ?? null;
    },
    getItemRaw(key) {
      return cache.get(key) ?? null;
    },
    setItem(key, value) {
      cache.set(key, value);
    },
    setItemRaw(key, value) {
      cache.set(key, value);
    },
    removeItem(key) {
      cache.delete(key);
    },
    getKeys() {
      return [...cache.keys()];
    },
    clear() {
      cache.clear();
    },
    dispose() {
      cache.clear();
    }
  };
});
function byteLength(value) {
  if (typeof Buffer !== "undefined") {
    try {
      return Buffer.byteLength(value);
    } catch {
    }
  }
  try {
    return typeof value === "string" ? value.length : JSON.stringify(value).length;
  } catch {
  }
  return 0;
}
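Usage sketch (not part of the vendored file; assumes createStorage). Note that the size-aware `sizeCalculation` above is only installed when `maxSize` or `maxEntrySize` is passed, and any user option can override the defaults via the trailing spread:

  import { createStorage } from "unstorage";
  import lruCacheDriver from "unstorage/drivers/lru-cache";

  const storage = createStorage({
    driver: lruCacheDriver({ max: 500, ttl: 60 * 1000 }), // lru-cache ttl is in milliseconds
  });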
42
node_modules/unstorage/drivers/memory.cjs
generated
vendored
Normal file
@@ -0,0 +1,42 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "memory";
module.exports = (0, _utils.defineDriver)(() => {
  const data = /* @__PURE__ */ new Map();
  return {
    name: DRIVER_NAME,
    getInstance: () => data,
    hasItem(key) {
      return data.has(key);
    },
    getItem(key) {
      return data.get(key) ?? null;
    },
    getItemRaw(key) {
      return data.get(key) ?? null;
    },
    setItem(key, value) {
      data.set(key, value);
    },
    setItemRaw(key, value) {
      data.set(key, value);
    },
    removeItem(key) {
      data.delete(key);
    },
    getKeys() {
      return [...data.keys()];
    },
    clear() {
      data.clear();
    },
    dispose() {
      data.clear();
    }
  };
});
2
node_modules/unstorage/drivers/memory.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
declare const _default: (opts: void) => import("..").Driver<void, Map<string, any>>;
export default _default;
36
node_modules/unstorage/drivers/memory.mjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
import { defineDriver } from "./utils/index.mjs";
const DRIVER_NAME = "memory";
export default defineDriver(() => {
  const data = /* @__PURE__ */ new Map();
  return {
    name: DRIVER_NAME,
    getInstance: () => data,
    hasItem(key) {
      return data.has(key);
    },
    getItem(key) {
      return data.get(key) ?? null;
    },
    getItemRaw(key) {
      return data.get(key) ?? null;
    },
    setItem(key, value) {
      data.set(key, value);
    },
    setItemRaw(key, value) {
      data.set(key, value);
    },
    removeItem(key) {
      data.delete(key);
    },
    getKeys() {
      return [...data.keys()];
    },
    clear() {
      data.clear();
    },
    dispose() {
      data.clear();
    }
  };
});
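Usage sketch (not part of the vendored file; assumes createStorage, which also falls back to this memory driver when none is given). Values live in a single in-process Map, so nothing survives a restart:

  import { createStorage } from "unstorage";
  import memoryDriver from "unstorage/drivers/memory";

  const storage = createStorage({ driver: memoryDriver() });
  await storage.setItem("foo", "bar");
  await storage.getItem("foo"); // "bar"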
119
node_modules/unstorage/drivers/mongodb.cjs
generated
vendored
Normal file
@@ -0,0 +1,119 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _mongodb = require("mongodb");
const DRIVER_NAME = "mongodb";
module.exports = (0, _utils.defineDriver)(opts => {
  let collection;
  const getMongoCollection = () => {
    if (!collection) {
      if (!opts.connectionString) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "connectionString");
      }
      const mongoClient = new _mongodb.MongoClient(opts.connectionString, opts.clientOptions);
      const db = mongoClient.db(opts.databaseName || "unstorage");
      collection = db.collection(opts.collectionName || "unstorage");
    }
    return collection;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getMongoCollection,
    async hasItem(key) {
      const result = await getMongoCollection().findOne({
        key
      });
      return !!result;
    },
    async getItem(key) {
      const document = await getMongoCollection().findOne({
        key
      });
      return document?.value ?? null;
    },
    async getItems(items) {
      const keys = items.map(item => item.key);
      const result = await getMongoCollection().find({
        key: {
          $in: keys
        }
      }).toArray();
      const resultMap = new Map(result.map(doc => [doc.key, doc]));
      return keys.map(key => {
        return {
          key,
          value: resultMap.get(key)?.value ?? null
        };
      });
    },
    async setItem(key, value) {
      const currentDateTime = /* @__PURE__ */ new Date();
      await getMongoCollection().updateOne({
        key
      }, {
        $set: {
          key,
          value,
          modifiedAt: currentDateTime
        },
        $setOnInsert: {
          createdAt: currentDateTime
        }
      }, {
        upsert: true
      });
    },
    async setItems(items) {
      const currentDateTime = /* @__PURE__ */ new Date();
      const operations = items.map(({
        key,
        value
      }) => ({
        updateOne: {
          filter: {
            key
          },
          update: {
            $set: {
              key,
              value,
              modifiedAt: currentDateTime
            },
            $setOnInsert: {
              createdAt: currentDateTime
            }
          },
          upsert: true
        }
      }));
      await getMongoCollection().bulkWrite(operations);
    },
    async removeItem(key) {
      await getMongoCollection().deleteOne({
        key
      });
    },
    async getKeys() {
      return await getMongoCollection().find().project({
        key: true
      }).map(d => d.key).toArray();
    },
    async getMeta(key) {
      const document = await getMongoCollection().findOne({
        key
      });
      return document ? {
        mtime: document.modifiedAt,
        birthtime: document.createdAt
      } : {};
    },
    async clear() {
      await getMongoCollection().deleteMany({});
    }
  };
});
23
node_modules/unstorage/drivers/mongodb.d.ts
generated
vendored
Normal file
@@ -0,0 +1,23 @@
import { type Collection, type MongoClientOptions } from "mongodb";
export interface MongoDbOptions {
  /**
   * The MongoDB connection string.
   */
  connectionString: string;
  /**
   * Optional configuration settings for the MongoClient instance.
   */
  clientOptions?: MongoClientOptions;
  /**
   * The name of the database to use.
   * @default "unstorage"
   */
  databaseName?: string;
  /**
   * The name of the collection to use.
   * @default "unstorage"
   */
  collectionName?: string;
}
declare const _default: (opts: MongoDbOptions) => import("..").Driver<MongoDbOptions, Collection<import("mongodb").Document>>;
export default _default;
82
node_modules/unstorage/drivers/mongodb.mjs
generated
vendored
Normal file
@@ -0,0 +1,82 @@
import { createRequiredError, defineDriver } from "./utils/index.mjs";
import { MongoClient } from "mongodb";
const DRIVER_NAME = "mongodb";
export default defineDriver((opts) => {
  let collection;
  const getMongoCollection = () => {
    if (!collection) {
      if (!opts.connectionString) {
        throw createRequiredError(DRIVER_NAME, "connectionString");
      }
      const mongoClient = new MongoClient(
        opts.connectionString,
        opts.clientOptions
      );
      const db = mongoClient.db(opts.databaseName || "unstorage");
      collection = db.collection(opts.collectionName || "unstorage");
    }
    return collection;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getMongoCollection,
    async hasItem(key) {
      const result = await getMongoCollection().findOne({ key });
      return !!result;
    },
    async getItem(key) {
      const document = await getMongoCollection().findOne({ key });
      return document?.value ?? null;
    },
    async getItems(items) {
      const keys = items.map((item) => item.key);
      const result = await getMongoCollection().find({ key: { $in: keys } }).toArray();
      const resultMap = new Map(result.map((doc) => [doc.key, doc]));
      return keys.map((key) => {
        return { key, value: resultMap.get(key)?.value ?? null };
      });
    },
    async setItem(key, value) {
      const currentDateTime = /* @__PURE__ */ new Date();
      await getMongoCollection().updateOne(
        { key },
        {
          $set: { key, value, modifiedAt: currentDateTime },
          $setOnInsert: { createdAt: currentDateTime }
        },
        { upsert: true }
      );
    },
    async setItems(items) {
      const currentDateTime = /* @__PURE__ */ new Date();
      const operations = items.map(({ key, value }) => ({
        updateOne: {
          filter: { key },
          update: {
            $set: { key, value, modifiedAt: currentDateTime },
            $setOnInsert: { createdAt: currentDateTime }
          },
          upsert: true
        }
      }));
      await getMongoCollection().bulkWrite(operations);
    },
    async removeItem(key) {
      await getMongoCollection().deleteOne({ key });
    },
    async getKeys() {
      return await getMongoCollection().find().project({ key: true }).map((d) => d.key).toArray();
    },
    async getMeta(key) {
      const document = await getMongoCollection().findOne({ key });
      return document ? {
        mtime: document.modifiedAt,
        birthtime: document.createdAt
      } : {};
    },
    async clear() {
      await getMongoCollection().deleteMany({});
    }
  };
});
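Usage sketch (not part of the vendored file; assumes createStorage, and the connection string is a placeholder). Documents are stored as { key, value, createdAt, modifiedAt }, which is what getMeta reads back as birthtime/mtime:

  import { createStorage } from "unstorage";
  import mongodbDriver from "unstorage/drivers/mongodb";

  const storage = createStorage({
    driver: mongodbDriver({
      connectionString: "mongodb://localhost:27017",
      databaseName: "unstorage",
      collectionName: "unstorage",
    }),
  });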
81
node_modules/unstorage/drivers/netlify-blobs.cjs
generated
vendored
Normal file
@@ -0,0 +1,81 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _blobs = require("@netlify/blobs");
var _ofetch = require("ofetch");
const DRIVER_NAME = "netlify-blobs";
module.exports = (0, _utils.defineDriver)(options => {
  const {
    deployScoped,
    name,
    ...opts
  } = options;
  let store;
  const getClient = () => {
    if (!store) {
      if (deployScoped) {
        if (name) {
          throw (0, _utils.createError)(DRIVER_NAME, "deploy-scoped stores cannot have a name");
        }
        store = (0, _blobs.getDeployStore)({
          fetch: _ofetch.fetch,
          ...options
        });
      } else {
        if (!name) {
          throw (0, _utils.createRequiredError)(DRIVER_NAME, "name");
        }
        store = (0, _blobs.getStore)({
          name: encodeURIComponent(name),
          fetch: _ofetch.fetch,
          ...opts
        });
      }
    }
    return store;
  };
  return {
    name: DRIVER_NAME,
    options,
    getInstance: getClient,
    async hasItem(key) {
      return getClient().getMetadata(key).then(Boolean);
    },
    getItem: (key, tops) => {
      return getClient().get(key, tops);
    },
    getMeta(key) {
      return getClient().getMetadata(key);
    },
    getItemRaw(key, topts) {
      return getClient().get(key, {
        type: topts?.type ?? "arrayBuffer"
      });
    },
    async setItem(key, value, topts) {
      await getClient().set(key, value, topts);
    },
    async setItemRaw(key, value, topts) {
      await getClient().set(key, value, topts);
    },
    removeItem(key) {
      return getClient().delete(key);
    },
    async getKeys(base, tops) {
      return (await getClient().list({
        ...tops,
        prefix: base
      })).blobs.map(item => item.key);
    },
    async clear(base) {
      const client = getClient();
      return Promise.allSettled((await client.list({
        prefix: base
      })).blobs.map(item => client.delete(item.key))).then(() => {});
    }
  };
});
19
node_modules/unstorage/drivers/netlify-blobs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
import type { Store, GetStoreOptions, GetDeployStoreOptions } from "@netlify/blobs";
export type NetlifyStoreOptions = NetlifyDeployStoreLegacyOptions | NetlifyDeployStoreOptions | NetlifyNamedStoreOptions;
export interface ExtraOptions {
  /** If set to `true`, the store is scoped to the deploy. This means that it is only available from that deploy, and will be deleted or rolled-back alongside it. */
  deployScoped?: boolean;
}
export interface NetlifyDeployStoreOptions extends GetDeployStoreOptions, ExtraOptions {
  name?: never;
  deployScoped: true;
}
export interface NetlifyDeployStoreLegacyOptions extends NetlifyDeployStoreOptions {
  region?: never;
}
export interface NetlifyNamedStoreOptions extends GetStoreOptions, ExtraOptions {
  name: string;
  deployScoped?: false;
}
declare const _default: (opts: NetlifyStoreOptions) => import("..").Driver<NetlifyStoreOptions, Store>;
export default _default;
67
node_modules/unstorage/drivers/netlify-blobs.mjs
generated
vendored
Normal file
@@ -0,0 +1,67 @@
import { createError, createRequiredError, defineDriver } from "./utils/index.mjs";
import { getStore, getDeployStore } from "@netlify/blobs";
import { fetch } from "ofetch";
const DRIVER_NAME = "netlify-blobs";
export default defineDriver((options) => {
  const { deployScoped, name, ...opts } = options;
  let store;
  const getClient = () => {
    if (!store) {
      if (deployScoped) {
        if (name) {
          throw createError(
            DRIVER_NAME,
            "deploy-scoped stores cannot have a name"
          );
        }
        store = getDeployStore({ fetch, ...options });
      } else {
        if (!name) {
          throw createRequiredError(DRIVER_NAME, "name");
        }
        store = getStore({ name: encodeURIComponent(name), fetch, ...opts });
      }
    }
    return store;
  };
  return {
    name: DRIVER_NAME,
    options,
    getInstance: getClient,
    async hasItem(key) {
      return getClient().getMetadata(key).then(Boolean);
    },
    getItem: (key, tops) => {
      return getClient().get(key, tops);
    },
    getMeta(key) {
      return getClient().getMetadata(key);
    },
    getItemRaw(key, topts) {
      return getClient().get(key, { type: topts?.type ?? "arrayBuffer" });
    },
    async setItem(key, value, topts) {
      await getClient().set(key, value, topts);
    },
    async setItemRaw(key, value, topts) {
      await getClient().set(key, value, topts);
    },
    removeItem(key) {
      return getClient().delete(key);
    },
    async getKeys(base, tops) {
      return (await getClient().list({ ...tops, prefix: base })).blobs.map(
        (item) => item.key
      );
    },
    async clear(base) {
      const client = getClient();
      return Promise.allSettled(
        (await client.list({ prefix: base })).blobs.map(
          (item) => client.delete(item.key)
        )
      ).then(() => {
      });
    }
  };
});
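Usage sketch (not part of the vendored file; assumes createStorage, and the store name is a placeholder). Either name a global store, or set deployScoped: true for a store tied to the current deploy; the two options are mutually exclusive, as the guard above enforces:

  import { createStorage } from "unstorage";
  import netlifyBlobsDriver from "unstorage/drivers/netlify-blobs";

  const named = createStorage({
    driver: netlifyBlobsDriver({ name: "user-uploads" }),
  });
  const perDeploy = createStorage({
    driver: netlifyBlobsDriver({ deployScoped: true }),
  });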
36
node_modules/unstorage/drivers/null.cjs
generated
vendored
Normal file
@@ -0,0 +1,36 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "null";
module.exports = (0, _utils.defineDriver)(() => {
  return {
    name: DRIVER_NAME,
    hasItem() {
      return false;
    },
    getItem() {
      return null;
    },
    getItemRaw() {
      return null;
    },
    getItems() {
      return [];
    },
    getMeta() {
      return null;
    },
    getKeys() {
      return [];
    },
    setItem() {},
    setItemRaw() {},
    setItems() {},
    removeItem() {},
    clear() {}
  };
});
2
node_modules/unstorage/drivers/null.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
declare const _default: (opts: void) => import("..").Driver<void, never>;
export default _default;
35
node_modules/unstorage/drivers/null.mjs
generated
vendored
Normal file
@@ -0,0 +1,35 @@
import { defineDriver } from "./utils/index.mjs";
const DRIVER_NAME = "null";
export default defineDriver(() => {
  return {
    name: DRIVER_NAME,
    hasItem() {
      return false;
    },
    getItem() {
      return null;
    },
    getItemRaw() {
      return null;
    },
    getItems() {
      return [];
    },
    getMeta() {
      return null;
    },
    getKeys() {
      return [];
    },
    setItem() {
    },
    setItemRaw() {
    },
    setItems() {
    },
    removeItem() {
    },
    clear() {
    }
  };
});
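Usage sketch (not part of the vendored file; assumes createStorage). The null driver is a no-op sink: writes are discarded and reads always miss, which can be handy for disabling a storage mount in tests or certain environments:

  import { createStorage } from "unstorage";
  import nullDriver from "unstorage/drivers/null";

  const storage = createStorage({ driver: nullDriver() });
  await storage.setItem("foo", "bar"); // silently dropped
  await storage.getItem("foo");        // null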
67
node_modules/unstorage/drivers/overlay.cjs
generated
vendored
Normal file
@@ -0,0 +1,67 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
const OVERLAY_REMOVED = "__OVERLAY_REMOVED__";
const DRIVER_NAME = "overlay";
module.exports = (0, _utils.defineDriver)(options => {
  return {
    name: DRIVER_NAME,
    options,
    async hasItem(key, opts) {
      for (const layer of options.layers) {
        if (await layer.hasItem(key, opts)) {
          if (layer === options.layers[0] && (await options.layers[0]?.getItem(key)) === OVERLAY_REMOVED) {
            return false;
          }
          return true;
        }
      }
      return false;
    },
    async getItem(key) {
      for (const layer of options.layers) {
        const value = await layer.getItem(key);
        if (value === OVERLAY_REMOVED) {
          return null;
        }
        if (value !== null) {
          return value;
        }
      }
      return null;
    },
    // TODO: Support native meta
    // async getMeta (key) {},
    async setItem(key, value, opts) {
      await options.layers[0]?.setItem?.(key, value, opts);
    },
    async removeItem(key, opts) {
      await options.layers[0]?.setItem?.(key, OVERLAY_REMOVED, opts);
    },
    async getKeys(base, opts) {
      const allKeys = await Promise.all(options.layers.map(async layer => {
        const keys = await layer.getKeys(base, opts);
        return keys.map(key => (0, _utils.normalizeKey)(key));
      }));
      const uniqueKeys = [...new Set(allKeys.flat())];
      const existingKeys = await Promise.all(uniqueKeys.map(async key => {
        if ((await options.layers[0]?.getItem(key)) === OVERLAY_REMOVED) {
          return false;
        }
        return key;
      }));
      return existingKeys.filter(Boolean);
    },
    async dispose() {
      await Promise.all(options.layers.map(async layer => {
        if (layer.dispose) {
          await layer.dispose();
        }
      }));
    }
  };
});
6
node_modules/unstorage/drivers/overlay.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
import type { Driver } from "..";
export interface OverlayStorageOptions {
  layers: Driver[];
}
declare const _default: (opts: OverlayStorageOptions) => Driver<OverlayStorageOptions, never>;
export default _default;
68
node_modules/unstorage/drivers/overlay.mjs
generated
vendored
Normal file
@@ -0,0 +1,68 @@
import { defineDriver } from "./utils/index.mjs";
import { normalizeKey } from "./utils/index.mjs";
const OVERLAY_REMOVED = "__OVERLAY_REMOVED__";
const DRIVER_NAME = "overlay";
export default defineDriver((options) => {
  return {
    name: DRIVER_NAME,
    options,
    async hasItem(key, opts) {
      for (const layer of options.layers) {
        if (await layer.hasItem(key, opts)) {
          if (layer === options.layers[0] && await options.layers[0]?.getItem(key) === OVERLAY_REMOVED) {
            return false;
          }
          return true;
        }
      }
      return false;
    },
    async getItem(key) {
      for (const layer of options.layers) {
        const value = await layer.getItem(key);
        if (value === OVERLAY_REMOVED) {
          return null;
        }
        if (value !== null) {
          return value;
        }
      }
      return null;
    },
    // TODO: Support native meta
    // async getMeta (key) {},
    async setItem(key, value, opts) {
      await options.layers[0]?.setItem?.(key, value, opts);
    },
    async removeItem(key, opts) {
      await options.layers[0]?.setItem?.(key, OVERLAY_REMOVED, opts);
    },
    async getKeys(base, opts) {
      const allKeys = await Promise.all(
        options.layers.map(async (layer) => {
          const keys = await layer.getKeys(base, opts);
          return keys.map((key) => normalizeKey(key));
        })
      );
      const uniqueKeys = [...new Set(allKeys.flat())];
      const existingKeys = await Promise.all(
        uniqueKeys.map(async (key) => {
          if (await options.layers[0]?.getItem(key) === OVERLAY_REMOVED) {
            return false;
          }
          return key;
        })
      );
      return existingKeys.filter(Boolean);
    },
    async dispose() {
      await Promise.all(
        options.layers.map(async (layer) => {
          if (layer.dispose) {
            await layer.dispose();
          }
        })
      );
    }
  };
});
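Usage sketch (not part of the vendored file; assumes createStorage and the memory/fs drivers shipped alongside this one, with a placeholder fs base path). Reads fall through the layers in order, writes go only to the first layer, and removals write the "__OVERLAY_REMOVED__" tombstone there so lower layers stay untouched:

  import { createStorage } from "unstorage";
  import overlayDriver from "unstorage/drivers/overlay";
  import memoryDriver from "unstorage/drivers/memory";
  import fsDriver from "unstorage/drivers/fs";

  const storage = createStorage({
    driver: overlayDriver({ layers: [memoryDriver(), fsDriver({ base: "./data" })] }),
  });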
80
node_modules/unstorage/drivers/planetscale.cjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _database = require("@planetscale/database");
var _nodeFetchNative = require("node-fetch-native");
const DRIVER_NAME = "planetscale";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  opts.table = opts.table || "storage";
  let _connection;
  const getConnection = () => {
    if (!_connection) {
      if (!opts.url) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "url");
      }
      _connection = (0, _database.connect)({
        url: opts.url,
        fetch: _nodeFetchNative.fetch
      });
      if (opts.boostCache) {
        _connection.execute("SET @@boost_cached_queries = true;").catch(error => {
          console.error("[unstorage] [planetscale] Failed to enable cached queries:", error);
        });
      }
    }
    return _connection;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getConnection,
    hasItem: async key => {
      const res = await getConnection().execute(`SELECT EXISTS (SELECT 1 FROM ${opts.table} WHERE id = :key) as value;`, {
        key
      });
      return rows(res)[0]?.value == "1";
    },
    getItem: async key => {
      const res = await getConnection().execute(`SELECT value from ${opts.table} WHERE id=:key;`, {
        key
      });
      return rows(res)[0]?.value ?? null;
    },
    setItem: async (key, value) => {
      await getConnection().execute(`INSERT INTO ${opts.table} (id, value) VALUES (:key, :value) ON DUPLICATE KEY UPDATE value = :value;`, {
        key,
        value
      });
    },
    removeItem: async key => {
      await getConnection().execute(`DELETE FROM ${opts.table} WHERE id=:key;`, {
        key
      });
    },
    getMeta: async key => {
      const res = await getConnection().execute(`SELECT created_at, updated_at from ${opts.table} WHERE id=:key;`, {
        key
      });
      return {
        birthtime: rows(res)[0]?.created_at,
        mtime: rows(res)[0]?.updated_at
      };
    },
    getKeys: async (base = "") => {
      const res = await getConnection().execute(`SELECT id from ${opts.table} WHERE id LIKE :base;`, {
        base: `${base}%`
      });
      return rows(res).map(r => r.id);
    },
    clear: async () => {
      await getConnection().execute(`DELETE FROM ${opts.table};`);
    }
  };
});
function rows(res) {
  return res.rows || [];
}
8
node_modules/unstorage/drivers/planetscale.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import type { Connection } from "@planetscale/database";
export interface PlanetscaleDriverOptions {
  url?: string;
  table?: string;
  boostCache?: boolean;
}
declare const _default: (opts: PlanetscaleDriverOptions | undefined) => import("..").Driver<PlanetscaleDriverOptions | undefined, Connection>;
export default _default;
82
node_modules/unstorage/drivers/planetscale.mjs
generated
vendored
Normal file
@@ -0,0 +1,82 @@
import { createRequiredError, defineDriver } from "./utils/index.mjs";
import { connect } from "@planetscale/database";
import { fetch } from "node-fetch-native";
const DRIVER_NAME = "planetscale";
export default defineDriver((opts = {}) => {
  opts.table = opts.table || "storage";
  let _connection;
  const getConnection = () => {
    if (!_connection) {
      if (!opts.url) {
        throw createRequiredError(DRIVER_NAME, "url");
      }
      _connection = connect({
        url: opts.url,
        fetch
      });
      if (opts.boostCache) {
        _connection.execute("SET @@boost_cached_queries = true;").catch((error) => {
          console.error(
            "[unstorage] [planetscale] Failed to enable cached queries:",
            error
          );
        });
      }
    }
    return _connection;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getConnection,
    hasItem: async (key) => {
      const res = await getConnection().execute(
        `SELECT EXISTS (SELECT 1 FROM ${opts.table} WHERE id = :key) as value;`,
        { key }
      );
      return rows(res)[0]?.value == "1";
    },
    getItem: async (key) => {
      const res = await getConnection().execute(
        `SELECT value from ${opts.table} WHERE id=:key;`,
        { key }
      );
      return rows(res)[0]?.value ?? null;
    },
    setItem: async (key, value) => {
      await getConnection().execute(
        `INSERT INTO ${opts.table} (id, value) VALUES (:key, :value) ON DUPLICATE KEY UPDATE value = :value;`,
        { key, value }
      );
    },
    removeItem: async (key) => {
      await getConnection().execute(
        `DELETE FROM ${opts.table} WHERE id=:key;`,
        { key }
      );
    },
    getMeta: async (key) => {
      const res = await getConnection().execute(
        `SELECT created_at, updated_at from ${opts.table} WHERE id=:key;`,
        { key }
      );
      return {
        birthtime: rows(res)[0]?.created_at,
        mtime: rows(res)[0]?.updated_at
      };
    },
    getKeys: async (base = "") => {
      const res = await getConnection().execute(
        `SELECT id from ${opts.table} WHERE id LIKE :base;`,
        { base: `${base}%` }
      );
      return rows(res).map((r) => r.id);
    },
    clear: async () => {
      await getConnection().execute(`DELETE FROM ${opts.table};`);
    }
  };
});
function rows(res) {
  return res.rows || [];
}
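Usage sketch (not part of the vendored file; assumes createStorage, and the url is a placeholder). The SQL above implies the expected shape of the backing table (default name "storage"): an `id` primary key plus `value`, `created_at`, and `updated_at` columns, with the timestamps surfaced through getMeta:

  import { createStorage } from "unstorage";
  import planetscaleDriver from "unstorage/drivers/planetscale";

  const storage = createStorage({
    driver: planetscaleDriver({
      url: "mysql://user:pass@host/db",
      table: "storage",
    }),
  });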
94
node_modules/unstorage/drivers/redis.cjs
generated
vendored
Normal file
@@ -0,0 +1,94 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _ioredis = _interopRequireDefault(require("ioredis"));
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
const DRIVER_NAME = "redis";
module.exports = (0, _utils.defineDriver)(opts => {
  let redisClient;
  const getRedisClient = () => {
    if (redisClient) {
      return redisClient;
    }
    if (opts.cluster) {
      redisClient = new _ioredis.default.Cluster(opts.cluster, opts.clusterOptions);
    } else if (opts.url) {
      redisClient = new _ioredis.default(opts.url, opts);
    } else {
      redisClient = new _ioredis.default(opts);
    }
    return redisClient;
  };
  const base = (opts.base || "").replace(/:$/, "");
  const p = (...keys) => (0, _utils.joinKeys)(base, ...keys);
  const d = key => base ? key.replace(`${base}:`, "") : key;
  if (opts.preConnect) {
    try {
      getRedisClient();
    } catch (error) {
      console.error(error);
    }
  }
  const scan = async pattern => {
    const client = getRedisClient();
    const keys = [];
    let cursor = "0";
    do {
      const [nextCursor, scanKeys] = opts.scanCount ? await client.scan(cursor, "MATCH", pattern, "COUNT", opts.scanCount) : await client.scan(cursor, "MATCH", pattern);
      cursor = nextCursor;
      keys.push(...scanKeys);
    } while (cursor !== "0");
    return keys;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getRedisClient,
    async hasItem(key) {
      return Boolean(await getRedisClient().exists(p(key)));
    },
    async getItem(key) {
      const value = await getRedisClient().get(p(key));
      return value ?? null;
    },
    async getItems(items) {
      const keys = items.map(item => p(item.key));
      const data = await getRedisClient().mget(...keys);
      return keys.map((key, index) => {
        return {
          key: d(key),
          value: data[index] ?? null
        };
      });
    },
    async setItem(key, value, tOptions) {
      const ttl = tOptions?.ttl ?? opts.ttl;
      if (ttl) {
        await getRedisClient().set(p(key), value, "EX", ttl);
      } else {
        await getRedisClient().set(p(key), value);
      }
    },
    async removeItem(key) {
      await getRedisClient().unlink(p(key));
    },
    async getKeys(base2) {
      const keys = await scan(p(base2, "*"));
      return keys.map(key => d(key));
    },
    async clear(base2) {
      const keys = await scan(p(base2, "*"));
      if (keys.length === 0) {
        return;
      }
      await getRedisClient().unlink(keys);
    },
    dispose() {
      return getRedisClient().disconnect();
    }
  };
});
37
node_modules/unstorage/drivers/redis.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
import Redis, { Cluster, type ClusterNode, type ClusterOptions, type RedisOptions as _RedisOptions } from "ioredis";
export interface RedisOptions extends _RedisOptions {
  /**
   * Optional prefix to use for all keys. Can be used for namespacing.
   */
  base?: string;
  /**
   * URL to use for connecting to Redis. Takes precedence over the `host` option. Has the format `redis://<REDIS_USER>:<REDIS_PASSWORD>@<REDIS_HOST>:<REDIS_PORT>`
   */
  url?: string;
  /**
   * List of Redis nodes to use for cluster mode. Takes precedence over the `url` and `host` options.
   */
  cluster?: ClusterNode[];
  /**
   * Options to use for cluster mode.
   */
  clusterOptions?: ClusterOptions;
  /**
   * Default TTL for all items in seconds.
   */
  ttl?: number;
  /**
   * How many keys to scan at once.
   *
   * [redis documentation](https://redis.io/docs/latest/commands/scan/#the-count-option)
   */
  scanCount?: number;
  /**
   * Whether to initialize the Redis instance immediately.
   * Otherwise, it will be initialized on the first read/write call.
   * @default false
   */
  preConnect?: boolean;
}
declare const _default: (opts: RedisOptions) => import("..").Driver<RedisOptions, Redis | Cluster>;
export default _default;
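A sketch of how these options come together with unstorage's createStorage (assumes the unstorage package and a local Redis; the base, ttl and scanCount values are illustrative):

import { createStorage } from "unstorage";
import redisDriver from "unstorage/drivers/redis";

const storage = createStorage({
  driver: redisDriver({
    base: "app",              // keys are namespaced as "app:<key>"
    url: "redis://localhost:6379",
    ttl: 60,                  // default expiry in seconds, applied by setItem
    scanCount: 100            // batch-size hint for SCAN in getKeys/clear
  })
});

await storage.setItem("session:42", "hello");
console.log(await storage.getItem("session:42")); // "hello"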
87
node_modules/unstorage/drivers/redis.mjs
generated
vendored
Normal file
87
node_modules/unstorage/drivers/redis.mjs
generated
vendored
Normal file
@@ -0,0 +1,87 @@
import { defineDriver, joinKeys } from "./utils/index.mjs";
import Redis from "ioredis";
const DRIVER_NAME = "redis";
export default defineDriver((opts) => {
  let redisClient;
  const getRedisClient = () => {
    if (redisClient) {
      return redisClient;
    }
    if (opts.cluster) {
      redisClient = new Redis.Cluster(opts.cluster, opts.clusterOptions);
    } else if (opts.url) {
      redisClient = new Redis(opts.url, opts);
    } else {
      redisClient = new Redis(opts);
    }
    return redisClient;
  };
  const base = (opts.base || "").replace(/:$/, "");
  const p = (...keys) => joinKeys(base, ...keys);
  const d = (key) => base ? key.replace(`${base}:`, "") : key;
  if (opts.preConnect) {
    try {
      getRedisClient();
    } catch (error) {
      console.error(error);
    }
  }
  const scan = async (pattern) => {
    const client = getRedisClient();
    const keys = [];
    let cursor = "0";
    do {
      const [nextCursor, scanKeys] = opts.scanCount ? await client.scan(cursor, "MATCH", pattern, "COUNT", opts.scanCount) : await client.scan(cursor, "MATCH", pattern);
      cursor = nextCursor;
      keys.push(...scanKeys);
    } while (cursor !== "0");
    return keys;
  };
  return {
    name: DRIVER_NAME,
    options: opts,
    getInstance: getRedisClient,
    async hasItem(key) {
      return Boolean(await getRedisClient().exists(p(key)));
    },
    async getItem(key) {
      const value = await getRedisClient().get(p(key));
      return value ?? null;
    },
    async getItems(items) {
      const keys = items.map((item) => p(item.key));
      const data = await getRedisClient().mget(...keys);
      return keys.map((key, index) => {
        return {
          key: d(key),
          value: data[index] ?? null
        };
      });
    },
    async setItem(key, value, tOptions) {
      const ttl = tOptions?.ttl ?? opts.ttl;
      if (ttl) {
        await getRedisClient().set(p(key), value, "EX", ttl);
      } else {
        await getRedisClient().set(p(key), value);
      }
    },
    async removeItem(key) {
      await getRedisClient().unlink(p(key));
    },
    async getKeys(base2) {
      const keys = await scan(p(base2, "*"));
      return keys.map((key) => d(key));
    },
    async clear(base2) {
      const keys = await scan(p(base2, "*"));
      if (keys.length === 0) {
        return;
      }
      await getRedisClient().unlink(keys);
    },
    dispose() {
      return getRedisClient().disconnect();
    }
  };
});
169
node_modules/unstorage/drivers/s3.cjs
generated
vendored
Normal file
169
node_modules/unstorage/drivers/s3.cjs
generated
vendored
Normal file
@@ -0,0 +1,169 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _aws4fetch = require("aws4fetch");
const DRIVER_NAME = "s3";
module.exports = (0, _utils.defineDriver)(options => {
  let _awsClient;
  const getAwsClient = () => {
    if (!_awsClient) {
      if (!options.accessKeyId) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "accessKeyId");
      }
      if (!options.secretAccessKey) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "secretAccessKey");
      }
      if (!options.endpoint) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "endpoint");
      }
      if (!options.region) {
        throw (0, _utils.createRequiredError)(DRIVER_NAME, "region");
      }
      _awsClient = new _aws4fetch.AwsClient({
        service: "s3",
        accessKeyId: options.accessKeyId,
        secretAccessKey: options.secretAccessKey,
        region: options.region
      });
    }
    return _awsClient;
  };
  const baseURL = `${options.endpoint.replace(/\/$/, "")}/${options.bucket || ""}`;
  const url = (key = "") => `${baseURL}/${(0, _utils.normalizeKey)(key, "/")}`;
  const awsFetch = async (url2, opts) => {
    const request = await getAwsClient().sign(url2, opts);
    const res = await fetch(request);
    if (!res.ok) {
      if (res.status === 404) {
        return null;
      }
      throw (0, _utils.createError)(DRIVER_NAME, `[${request.method}] ${url2}: ${res.status} ${res.statusText} ${await res.text()}`);
    }
    return res;
  };
  const headObject = async key => {
    const res = await awsFetch(url(key), {
      method: "HEAD"
    });
    if (!res) {
      return null;
    }
    const metaHeaders = {};
    for (const [key2, value] of res.headers.entries()) {
      const match = /x-amz-meta-(.*)/.exec(key2);
      if (match?.[1]) {
        metaHeaders[match[1]] = value;
      }
    }
    return metaHeaders;
  };
  const listObjects = async prefix => {
    const res = await awsFetch(baseURL).then(r => r?.text());
    if (!res) {
      console.log("no list", prefix ? `${baseURL}?prefix=${prefix}` : baseURL);
      return null;
    }
    return parseList(res);
  };
  const getObject = key => {
    return awsFetch(url(key));
  };
  const putObject = async (key, value) => {
    return awsFetch(url(key), {
      method: "PUT",
      body: value
    });
  };
  const deleteObject = async key => {
    return awsFetch(url(key), {
      method: "DELETE"
    }).then(r => {
      if (r?.status !== 204 && r?.status !== 200) {
        throw (0, _utils.createError)(DRIVER_NAME, `Failed to delete ${key}`);
      }
    });
  };
  const deleteObjects = async base => {
    const keys = await listObjects(base);
    if (!keys?.length) {
      return null;
    }
    if (options.bulkDelete === false) {
      await Promise.all(keys.map(key => deleteObject(key)));
    } else {
      const body = deleteKeysReq(keys);
      await awsFetch(`${baseURL}?delete`, {
        method: "POST",
        headers: {
          "x-amz-checksum-sha256": await sha256Base64(body)
        },
        body
      });
    }
  };
  return {
    name: DRIVER_NAME,
    options,
    getItem(key) {
      return getObject(key).then(res => res ? res.text() : null);
    },
    getItemRaw(key) {
      return getObject(key).then(res => res ? res.arrayBuffer() : null);
    },
    async setItem(key, value) {
      await putObject(key, value);
    },
    async setItemRaw(key, value) {
      await putObject(key, value);
    },
    getMeta(key) {
      return headObject(key);
    },
    hasItem(key) {
      return headObject(key).then(meta => !!meta);
    },
    getKeys(base) {
      return listObjects(base).then(keys => keys || []);
    },
    async removeItem(key) {
      await deleteObject(key);
    },
    async clear(base) {
      await deleteObjects(base);
    }
  };
});
function deleteKeysReq(keys) {
  return `<Delete>${keys.map(key => {
    key = key.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
    return /* xml */`<Object><Key>${key}</Key></Object>`;
  }).join("")}</Delete>`;
}
async function sha256Base64(str) {
  const buffer = new TextEncoder().encode(str);
  const hash = await crypto.subtle.digest("SHA-256", buffer);
  const bytes = new Uint8Array(hash);
  const binaryString = String.fromCharCode(...bytes);
  return btoa(binaryString);
}
function parseList(xml) {
  if (!xml.startsWith("<?xml")) {
    throw new Error("Invalid XML");
  }
  const listBucketResult = xml.match(/<ListBucketResult[^>]*>([\s\S]*)<\/ListBucketResult>/)?.[1];
  if (!listBucketResult) {
    throw new Error("Missing <ListBucketResult>");
  }
  const contents = listBucketResult.match(/<Contents[^>]*>([\s\S]*?)<\/Contents>/g);
  if (!contents?.length) {
    return [];
  }
  return contents.map(content => {
    const key = content.match(/<Key>([\s\S]+?)<\/Key>/)?.[1];
    return key;
  }).filter(Boolean);
}
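The bulk path above POSTs a <Delete> XML body to ?delete and must send its SHA-256 checksum, which S3's DeleteObjects API requires. A standalone sketch of building that payload (runs on Node 18+ or any runtime with WebCrypto; the key names are illustrative):

const keys = ["cache:a.txt", "reports/<2024>.csv"];
// XML-escape each key, exactly as deleteKeysReq does above.
const body = `<Delete>${keys.map(k => {
  k = k.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
  return `<Object><Key>${k}</Key></Object>`;
}).join("")}</Delete>`;
// Base64-encoded SHA-256 digest of the body.
const digest = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(body));
const checksum = btoa(String.fromCharCode(...new Uint8Array(digest)));
// checksum goes into the "x-amz-checksum-sha256" header of the POST ?delete request.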
34
node_modules/unstorage/drivers/s3.d.ts
generated
vendored
Normal file
34
node_modules/unstorage/drivers/s3.d.ts
generated
vendored
Normal file
@@ -0,0 +1,34 @@
export interface S3DriverOptions {
  /**
   * Access Key ID
   */
  accessKeyId: string;
  /**
   * Secret Access Key
   */
  secretAccessKey: string;
  /**
   * The endpoint URL of the S3 service.
   *
   * - For AWS S3: "https://s3.[region].amazonaws.com/"
   * - For Cloudflare R2: "https://[uid].r2.cloudflarestorage.com/"
   */
  endpoint: string;
  /**
   * The region of the S3 bucket.
   *
   * - For AWS S3, this is the region of the bucket.
   * - For Cloudflare R2, this can be set to `auto`.
   */
  region: string;
  /**
   * The name of the bucket.
   */
  bucket: string;
  /**
   * Enabled by default to speed up the `clear()` operation. Set to `false` if the provider does not implement [DeleteObjects](https://docs.aws.amazon.com/AmazonS3/latest/API/API_DeleteObjects.html).
   */
  bulkDelete?: boolean;
}
declare const _default: (opts: S3DriverOptions) => import("..").Driver<S3DriverOptions, never>;
export default _default;
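A sketch of configuring the driver against AWS S3 (assumes the unstorage package; the env variable names, region and bucket are illustrative placeholders):

import { createStorage } from "unstorage";
import s3Driver from "unstorage/drivers/s3";

const storage = createStorage({
  driver: s3Driver({
    accessKeyId: process.env.S3_ACCESS_KEY_ID,
    secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
    endpoint: "https://s3.us-east-1.amazonaws.com/",
    region: "us-east-1",        // "auto" for Cloudflare R2
    bucket: "my-bucket"
  })
});

await storage.setItem("notes/today.txt", "hello");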
174
node_modules/unstorage/drivers/s3.mjs
generated
vendored
Normal file
174
node_modules/unstorage/drivers/s3.mjs
generated
vendored
Normal file
@@ -0,0 +1,174 @@
import {
  defineDriver,
  createRequiredError,
  normalizeKey,
  createError
} from "./utils/index.mjs";
import { AwsClient } from "aws4fetch";
const DRIVER_NAME = "s3";
export default defineDriver((options) => {
  let _awsClient;
  const getAwsClient = () => {
    if (!_awsClient) {
      if (!options.accessKeyId) {
        throw createRequiredError(DRIVER_NAME, "accessKeyId");
      }
      if (!options.secretAccessKey) {
        throw createRequiredError(DRIVER_NAME, "secretAccessKey");
      }
      if (!options.endpoint) {
        throw createRequiredError(DRIVER_NAME, "endpoint");
      }
      if (!options.region) {
        throw createRequiredError(DRIVER_NAME, "region");
      }
      _awsClient = new AwsClient({
        service: "s3",
        accessKeyId: options.accessKeyId,
        secretAccessKey: options.secretAccessKey,
        region: options.region
      });
    }
    return _awsClient;
  };
  const baseURL = `${options.endpoint.replace(/\/$/, "")}/${options.bucket || ""}`;
  const url = (key = "") => `${baseURL}/${normalizeKey(key, "/")}`;
  const awsFetch = async (url2, opts) => {
    const request = await getAwsClient().sign(url2, opts);
    const res = await fetch(request);
    if (!res.ok) {
      if (res.status === 404) {
        return null;
      }
      throw createError(
        DRIVER_NAME,
        `[${request.method}] ${url2}: ${res.status} ${res.statusText} ${await res.text()}`
      );
    }
    return res;
  };
  const headObject = async (key) => {
    const res = await awsFetch(url(key), { method: "HEAD" });
    if (!res) {
      return null;
    }
    const metaHeaders = {};
    for (const [key2, value] of res.headers.entries()) {
      const match = /x-amz-meta-(.*)/.exec(key2);
      if (match?.[1]) {
        metaHeaders[match[1]] = value;
      }
    }
    return metaHeaders;
  };
  const listObjects = async (prefix) => {
    const res = await awsFetch(baseURL).then((r) => r?.text());
    if (!res) {
      console.log("no list", prefix ? `${baseURL}?prefix=${prefix}` : baseURL);
      return null;
    }
    return parseList(res);
  };
  const getObject = (key) => {
    return awsFetch(url(key));
  };
  const putObject = async (key, value) => {
    return awsFetch(url(key), {
      method: "PUT",
      body: value
    });
  };
  const deleteObject = async (key) => {
    return awsFetch(url(key), { method: "DELETE" }).then((r) => {
      if (r?.status !== 204 && r?.status !== 200) {
        throw createError(DRIVER_NAME, `Failed to delete ${key}`);
      }
    });
  };
  const deleteObjects = async (base) => {
    const keys = await listObjects(base);
    if (!keys?.length) {
      return null;
    }
    if (options.bulkDelete === false) {
      await Promise.all(keys.map((key) => deleteObject(key)));
    } else {
      const body = deleteKeysReq(keys);
      await awsFetch(`${baseURL}?delete`, {
        method: "POST",
        headers: {
          "x-amz-checksum-sha256": await sha256Base64(body)
        },
        body
      });
    }
  };
  return {
    name: DRIVER_NAME,
    options,
    getItem(key) {
      return getObject(key).then((res) => res ? res.text() : null);
    },
    getItemRaw(key) {
      return getObject(key).then((res) => res ? res.arrayBuffer() : null);
    },
    async setItem(key, value) {
      await putObject(key, value);
    },
    async setItemRaw(key, value) {
      await putObject(key, value);
    },
    getMeta(key) {
      return headObject(key);
    },
    hasItem(key) {
      return headObject(key).then((meta) => !!meta);
    },
    getKeys(base) {
      return listObjects(base).then((keys) => keys || []);
    },
    async removeItem(key) {
      await deleteObject(key);
    },
    async clear(base) {
      await deleteObjects(base);
    }
  };
});
function deleteKeysReq(keys) {
  return `<Delete>${keys.map((key) => {
    key = key.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
    return (
      /* xml */
      `<Object><Key>${key}</Key></Object>`
    );
  }).join("")}</Delete>`;
}
async function sha256Base64(str) {
  const buffer = new TextEncoder().encode(str);
  const hash = await crypto.subtle.digest("SHA-256", buffer);
  const bytes = new Uint8Array(hash);
  const binaryString = String.fromCharCode(...bytes);
  return btoa(binaryString);
}
function parseList(xml) {
  if (!xml.startsWith("<?xml")) {
    throw new Error("Invalid XML");
  }
  const listBucketResult = xml.match(
    /<ListBucketResult[^>]*>([\s\S]*)<\/ListBucketResult>/
  )?.[1];
  if (!listBucketResult) {
    throw new Error("Missing <ListBucketResult>");
  }
  const contents = listBucketResult.match(
    /<Contents[^>]*>([\s\S]*?)<\/Contents>/g
  );
  if (!contents?.length) {
    return [];
  }
  return contents.map((content) => {
    const key = content.match(/<Key>([\s\S]+?)<\/Key>/)?.[1];
    return key;
  }).filter(Boolean);
}
19
node_modules/unstorage/drivers/session-storage.cjs
generated
vendored
Normal file
19
node_modules/unstorage/drivers/session-storage.cjs
generated
vendored
Normal file
@@ -0,0 +1,19 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _localstorage = _interopRequireDefault(require("./localstorage.cjs"));
function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
const DRIVER_NAME = "session-storage";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  return {
    ...(0, _localstorage.default)({
      windowKey: "sessionStorage",
      ...opts
    }),
    name: DRIVER_NAME
  };
});
5
node_modules/unstorage/drivers/session-storage.d.ts
generated
vendored
Normal file
5
node_modules/unstorage/drivers/session-storage.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
import { type LocalStorageOptions } from "./localstorage";
export interface SessionStorageOptions extends LocalStorageOptions {
}
declare const _default: (opts: LocalStorageOptions | undefined) => import("..").Driver<LocalStorageOptions | undefined, Storage>;
export default _default;
12
node_modules/unstorage/drivers/session-storage.mjs
generated
vendored
Normal file
12
node_modules/unstorage/drivers/session-storage.mjs
generated
vendored
Normal file
@@ -0,0 +1,12 @@
import { defineDriver } from "./utils/index.mjs";
import localstorage from "./localstorage.mjs";
const DRIVER_NAME = "session-storage";
export default defineDriver((opts = {}) => {
  return {
    ...localstorage({
      windowKey: "sessionStorage",
      ...opts
    }),
    name: DRIVER_NAME
  };
});
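This driver is just the localstorage driver re-pointed at window.sessionStorage via windowKey. A browser-side sketch (assumes unstorage is bundled for the client; the base value is illustrative):

import { createStorage } from "unstorage";
import sessionStorageDriver from "unstorage/drivers/session-storage";

const storage = createStorage({
  driver: sessionStorageDriver({ base: "app" })
});

// Persists only for the current tab/session, unlike the localstorage driver.
await storage.setItem("draft", "hello");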
83
node_modules/unstorage/drivers/uploadthing.cjs
generated
vendored
Normal file
83
node_modules/unstorage/drivers/uploadthing.cjs
generated
vendored
Normal file
@@ -0,0 +1,83 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _utils = require("./utils/index.cjs");
var _server = require("uploadthing/server");
const DRIVER_NAME = "uploadthing";
module.exports = (0, _utils.defineDriver)((opts = {}) => {
  let client;
  const base = opts.base ? (0, _utils.normalizeKey)(opts.base) : "";
  const r = key => base ? `${base}:${key}` : key;
  const getClient = () => {
    return client ??= new _server.UTApi({
      ...opts,
      defaultKeyType: "customId"
    });
  };
  const getKeys = async base2 => {
    const client2 = getClient();
    const {
      files
    } = await client2.listFiles({});
    return files.map(file => file.customId).filter(k => k && k.startsWith(base2));
  };
  const toFile = (key, value) => {
    return Object.assign(new Blob([value]), {
      name: key,
      customId: key
    });
  };
  return {
    name: DRIVER_NAME,
    getInstance() {
      return getClient();
    },
    getKeys(base2) {
      return getKeys(r(base2));
    },
    async hasItem(key) {
      const client2 = getClient();
      const res = await client2.getFileUrls(r(key));
      return res.data.length > 0;
    },
    async getItem(key) {
      const client2 = getClient();
      const url = await client2.getFileUrls(r(key)).then(res => res.data[0]?.url);
      if (!url) return null;
      return fetch(url).then(res => res.text());
    },
    async getItemRaw(key) {
      const client2 = getClient();
      const url = await client2.getFileUrls(r(key)).then(res => res.data[0]?.url);
      if (!url) return null;
      return fetch(url).then(res => res.arrayBuffer());
    },
    async setItem(key, value) {
      const client2 = getClient();
      await client2.uploadFiles(toFile(r(key), value));
    },
    async setItemRaw(key, value) {
      const client2 = getClient();
      await client2.uploadFiles(toFile(r(key), value));
    },
    async setItems(items) {
      const client2 = getClient();
      await client2.uploadFiles(items.map(item => toFile(r(item.key), item.value)));
    },
    async removeItem(key) {
      const client2 = getClient();
      await client2.deleteFiles([r(key)]);
    },
    async clear(base2) {
      const client2 = getClient();
      const keys = await getKeys(r(base2));
      await client2.deleteFiles(keys);
    }
    // getMeta(key, opts) {
    //   // TODO: We don't currently have an endpoint to fetch metadata, but it does exist
    // },
  };
});
8
node_modules/unstorage/drivers/uploadthing.d.ts
generated
vendored
Normal file
8
node_modules/unstorage/drivers/uploadthing.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
import { UTApi } from "uploadthing/server";
type UTApiOptions = Omit<Exclude<ConstructorParameters<typeof UTApi>[0], undefined>, "defaultKeyType">;
export interface UploadThingOptions extends UTApiOptions {
  /** Base key prefixed to all keys. */
  base?: string;
}
declare const _default: (opts: UploadThingOptions) => import("..").Driver<UploadThingOptions, UTApi>;
export default _default;
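A sketch of wiring the driver up (assumes the unstorage and uploadthing packages; the credential field shown is an assumption — any option the installed UTApi constructor accepts is forwarded through these options):

import { createStorage } from "unstorage";
import uploadthingDriver from "unstorage/drivers/uploadthing";

const storage = createStorage({
  driver: uploadthingDriver({
    token: process.env.UPLOADTHING_TOKEN, // assumed credential option, passed straight to UTApi
    base: "kv"                            // keys become customIds like "kv:<key>"
  })
});

await storage.setItem("hello.txt", "hello");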
77
node_modules/unstorage/drivers/uploadthing.mjs
generated
vendored
Normal file
77
node_modules/unstorage/drivers/uploadthing.mjs
generated
vendored
Normal file
@@ -0,0 +1,77 @@
import { defineDriver, normalizeKey } from "./utils/index.mjs";
import { UTApi } from "uploadthing/server";
const DRIVER_NAME = "uploadthing";
export default defineDriver((opts = {}) => {
  let client;
  const base = opts.base ? normalizeKey(opts.base) : "";
  const r = (key) => base ? `${base}:${key}` : key;
  const getClient = () => {
    return client ??= new UTApi({
      ...opts,
      defaultKeyType: "customId"
    });
  };
  const getKeys = async (base2) => {
    const client2 = getClient();
    const { files } = await client2.listFiles({});
    return files.map((file) => file.customId).filter((k) => k && k.startsWith(base2));
  };
  const toFile = (key, value) => {
    return Object.assign(new Blob([value]), {
      name: key,
      customId: key
    });
  };
  return {
    name: DRIVER_NAME,
    getInstance() {
      return getClient();
    },
    getKeys(base2) {
      return getKeys(r(base2));
    },
    async hasItem(key) {
      const client2 = getClient();
      const res = await client2.getFileUrls(r(key));
      return res.data.length > 0;
    },
    async getItem(key) {
      const client2 = getClient();
      const url = await client2.getFileUrls(r(key)).then((res) => res.data[0]?.url);
      if (!url) return null;
      return fetch(url).then((res) => res.text());
    },
    async getItemRaw(key) {
      const client2 = getClient();
      const url = await client2.getFileUrls(r(key)).then((res) => res.data[0]?.url);
      if (!url) return null;
      return fetch(url).then((res) => res.arrayBuffer());
    },
    async setItem(key, value) {
      const client2 = getClient();
      await client2.uploadFiles(toFile(r(key), value));
    },
    async setItemRaw(key, value) {
      const client2 = getClient();
      await client2.uploadFiles(toFile(r(key), value));
    },
    async setItems(items) {
      const client2 = getClient();
      await client2.uploadFiles(
        items.map((item) => toFile(r(item.key), item.value))
      );
    },
    async removeItem(key) {
      const client2 = getClient();
      await client2.deleteFiles([r(key)]);
    },
    async clear(base2) {
      const client2 = getClient();
      const keys = await getKeys(r(base2));
      await client2.deleteFiles(keys);
    }
    // getMeta(key, opts) {
    //   // TODO: We don't currently have an endpoint to fetch metadata, but it does exist
    // },
  };
});
80
node_modules/unstorage/drivers/upstash.cjs
generated
vendored
Normal file
80
node_modules/unstorage/drivers/upstash.cjs
generated
vendored
Normal file
@@ -0,0 +1,80 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _redis = require("@upstash/redis");
var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "upstash";
module.exports = (0, _utils.defineDriver)((options = {}) => {
  const base = (0, _utils.normalizeKey)(options?.base);
  const r = (...keys) => (0, _utils.joinKeys)(base, ...keys);
  let redisClient;
  const getClient = () => {
    if (redisClient) {
      return redisClient;
    }
    const url = options.url || globalThis.process?.env?.UPSTASH_REDIS_REST_URL;
    const token = options.token || globalThis.process?.env?.UPSTASH_REDIS_REST_TOKEN;
    redisClient = new _redis.Redis({
      url,
      token,
      ...options
    });
    return redisClient;
  };
  const scan = async pattern => {
    const client = getClient();
    const keys = [];
    let cursor = "0";
    do {
      const [nextCursor, scanKeys] = await client.scan(cursor, {
        match: pattern,
        count: options.scanCount
      });
      cursor = nextCursor;
      keys.push(...scanKeys);
    } while (cursor !== "0");
    return keys;
  };
  return {
    name: DRIVER_NAME,
    getInstance: getClient,
    async hasItem(key) {
      return Boolean(await getClient().exists(r(key)));
    },
    async getItem(key) {
      return await getClient().get(r(key));
    },
    async getItems(items) {
      const keys = items.map(item => r(item.key));
      const data = await getClient().mget(...keys);
      return keys.map((key, index) => {
        return {
          key: base ? key.slice(base.length + 1) : key,
          value: data[index] ?? null
        };
      });
    },
    async setItem(key, value, tOptions) {
      const ttl = tOptions?.ttl || options.ttl;
      return getClient().set(r(key), value, ttl ? {
        ex: ttl
      } : void 0).then(() => {});
    },
    async removeItem(key) {
      await getClient().unlink(r(key));
    },
    async getKeys(_base) {
      return await scan(r(_base, "*")).then(keys => base ? keys.map(key => key.slice(base.length + 1)) : keys);
    },
    async clear(base2) {
      const keys = await scan(r(base2, "*"));
      if (keys.length === 0) {
        return;
      }
      await getClient().del(...keys);
    }
  };
});
19
node_modules/unstorage/drivers/upstash.d.ts
generated
vendored
Normal file
19
node_modules/unstorage/drivers/upstash.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
import { type RedisConfigNodejs, Redis } from "@upstash/redis";
export interface UpstashOptions extends Partial<RedisConfigNodejs> {
  /**
   * Optional prefix to use for all keys. Can be used for namespacing.
   */
  base?: string;
  /**
   * Default TTL for all items in seconds.
   */
  ttl?: number;
  /**
   * How many keys to scan at once.
   *
   * [redis documentation](https://redis.io/docs/latest/commands/scan/#the-count-option)
   */
  scanCount?: number;
}
declare const _default: (opts: UpstashOptions) => import("..").Driver<UpstashOptions, Redis>;
export default _default;
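A sketch of the typical setup (assumes the unstorage and @upstash/redis packages; base and ttl are illustrative). As the implementation shows, url and token fall back to the UPSTASH_REDIS_REST_URL and UPSTASH_REDIS_REST_TOKEN environment variables when omitted:

import { createStorage } from "unstorage";
import upstashDriver from "unstorage/drivers/upstash";

const storage = createStorage({
  driver: upstashDriver({ base: "app", ttl: 300 })
});

await storage.setItem("greeting", "hello");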
73
node_modules/unstorage/drivers/upstash.mjs
generated
vendored
Normal file
73
node_modules/unstorage/drivers/upstash.mjs
generated
vendored
Normal file
@@ -0,0 +1,73 @@
import { Redis } from "@upstash/redis";
import { defineDriver, normalizeKey, joinKeys } from "./utils/index.mjs";
const DRIVER_NAME = "upstash";
export default defineDriver(
  (options = {}) => {
    const base = normalizeKey(options?.base);
    const r = (...keys) => joinKeys(base, ...keys);
    let redisClient;
    const getClient = () => {
      if (redisClient) {
        return redisClient;
      }
      const url = options.url || globalThis.process?.env?.UPSTASH_REDIS_REST_URL;
      const token = options.token || globalThis.process?.env?.UPSTASH_REDIS_REST_TOKEN;
      redisClient = new Redis({ url, token, ...options });
      return redisClient;
    };
    const scan = async (pattern) => {
      const client = getClient();
      const keys = [];
      let cursor = "0";
      do {
        const [nextCursor, scanKeys] = await client.scan(cursor, {
          match: pattern,
          count: options.scanCount
        });
        cursor = nextCursor;
        keys.push(...scanKeys);
      } while (cursor !== "0");
      return keys;
    };
    return {
      name: DRIVER_NAME,
      getInstance: getClient,
      async hasItem(key) {
        return Boolean(await getClient().exists(r(key)));
      },
      async getItem(key) {
        return await getClient().get(r(key));
      },
      async getItems(items) {
        const keys = items.map((item) => r(item.key));
        const data = await getClient().mget(...keys);
        return keys.map((key, index) => {
          return {
            key: base ? key.slice(base.length + 1) : key,
            value: data[index] ?? null
          };
        });
      },
      async setItem(key, value, tOptions) {
        const ttl = tOptions?.ttl || options.ttl;
        return getClient().set(r(key), value, ttl ? { ex: ttl } : void 0).then(() => {
        });
      },
      async removeItem(key) {
        await getClient().unlink(r(key));
      },
      async getKeys(_base) {
        return await scan(r(_base, "*")).then(
          (keys) => base ? keys.map((key) => key.slice(base.length + 1)) : keys
        );
      },
      async clear(base2) {
        const keys = await scan(r(base2, "*"));
        if (keys.length === 0) {
          return;
        }
        await getClient().del(...keys);
      }
    };
  }
);
31
node_modules/unstorage/drivers/utils/cloudflare.cjs
generated
vendored
Normal file
31
node_modules/unstorage/drivers/utils/cloudflare.cjs
generated
vendored
Normal file
@@ -0,0 +1,31 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getBinding = getBinding;
exports.getKVBinding = getKVBinding;
exports.getR2Binding = getR2Binding;
var _index = require("./index.cjs");
function getBinding(binding) {
  let bindingName = "[binding]";
  if (typeof binding === "string") {
    bindingName = binding;
    binding = globalThis[bindingName] || globalThis.__env__?.[bindingName];
  }
  if (!binding) {
    throw (0, _index.createError)("cloudflare", `Invalid binding \`${bindingName}\`: \`${binding}\``);
  }
  for (const key of ["get", "put", "delete"]) {
    if (!(key in binding)) {
      throw (0, _index.createError)("cloudflare", `Invalid binding \`${bindingName}\`: \`${key}\` key is missing`);
    }
  }
  return binding;
}
function getKVBinding(binding = "STORAGE") {
  return getBinding(binding);
}
function getR2Binding(binding = "BUCKET") {
  return getBinding(binding);
}
3
node_modules/unstorage/drivers/utils/cloudflare.d.ts
generated
vendored
Normal file
3
node_modules/unstorage/drivers/utils/cloudflare.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
export declare function getBinding(binding: KVNamespace | R2Bucket | string): KVNamespace<string> | R2Bucket;
export declare function getKVBinding(binding?: KVNamespace | string): KVNamespace;
export declare function getR2Binding(binding?: R2Bucket | string): R2Bucket;
29
node_modules/unstorage/drivers/utils/cloudflare.mjs
generated
vendored
Normal file
29
node_modules/unstorage/drivers/utils/cloudflare.mjs
generated
vendored
Normal file
@@ -0,0 +1,29 @@
import { createError } from "./index.mjs";
export function getBinding(binding) {
  let bindingName = "[binding]";
  if (typeof binding === "string") {
    bindingName = binding;
    binding = globalThis[bindingName] || globalThis.__env__?.[bindingName];
  }
  if (!binding) {
    throw createError(
      "cloudflare",
      `Invalid binding \`${bindingName}\`: \`${binding}\``
    );
  }
  for (const key of ["get", "put", "delete"]) {
    if (!(key in binding)) {
      throw createError(
        "cloudflare",
        `Invalid binding \`${bindingName}\`: \`${key}\` key is missing`
      );
    }
  }
  return binding;
}
export function getKVBinding(binding = "STORAGE") {
  return getBinding(binding);
}
export function getR2Binding(binding = "BUCKET") {
  return getBinding(binding);
}
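These helpers resolve a Cloudflare binding either from an object passed directly or by name from the worker's global scope, then shape-check it for get/put/delete. A sketch of both call styles (assumes a worker with a KV namespace bound under the illustrative name MY_KV; the import path is shown relative to this utils directory):

import { getKVBinding } from "./cloudflare.mjs";

// By name: looked up on globalThis (or globalThis.__env__ in some runtimes).
const kvByName = getKVBinding("MY_KV");

// By object: e.g. inside a module worker's fetch(request, env) handler,
// pass env.MY_KV directly; invalid or incomplete bindings throw a descriptive error.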
35
node_modules/unstorage/drivers/utils/index.cjs
generated
vendored
Normal file
35
node_modules/unstorage/drivers/utils/index.cjs
generated
vendored
Normal file
@@ -0,0 +1,35 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.createError = createError;
exports.createRequiredError = createRequiredError;
exports.defineDriver = defineDriver;
exports.joinKeys = joinKeys;
exports.normalizeKey = normalizeKey;
function defineDriver(factory) {
  return factory;
}
function normalizeKey(key, sep = ":") {
  if (!key) {
    return "";
  }
  return key.replace(/[:/\\]/g, sep).replace(/^[:/\\]|[:/\\]$/g, "");
}
function joinKeys(...keys) {
  return keys.map(key => normalizeKey(key)).filter(Boolean).join(":");
}
function createError(driver, message, opts) {
  const err = new Error(`[unstorage] [${driver}] ${message}`, opts);
  if (Error.captureStackTrace) {
    Error.captureStackTrace(err, createError);
  }
  return err;
}
function createRequiredError(driver, name) {
  if (Array.isArray(name)) {
    return createError(driver, `Missing some of the required options ${name.map(n => "`" + n + "`").join(", ")}`);
  }
  return createError(driver, `Missing required option \`${name}\`.`);
}
10
node_modules/unstorage/drivers/utils/index.d.ts
generated
vendored
Normal file
10
node_modules/unstorage/drivers/utils/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
import type { Driver } from "../..";
type DriverFactory<OptionsT, InstanceT> = (opts: OptionsT) => Driver<OptionsT, InstanceT>;
interface ErrorOptions {
}
export declare function defineDriver<OptionsT = any, InstanceT = never>(factory: DriverFactory<OptionsT, InstanceT>): DriverFactory<OptionsT, InstanceT>;
export declare function normalizeKey(key: string | undefined, sep?: ":" | "/"): string;
export declare function joinKeys(...keys: string[]): string;
export declare function createError(driver: string, message: string, opts?: ErrorOptions): Error;
export declare function createRequiredError(driver: string, name: string | string[]): Error;
export {};
28
node_modules/unstorage/drivers/utils/index.mjs
generated
vendored
Normal file
28
node_modules/unstorage/drivers/utils/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,28 @@
export function defineDriver(factory) {
  return factory;
}
export function normalizeKey(key, sep = ":") {
  if (!key) {
    return "";
  }
  return key.replace(/[:/\\]/g, sep).replace(/^[:/\\]|[:/\\]$/g, "");
}
export function joinKeys(...keys) {
  return keys.map((key) => normalizeKey(key)).filter(Boolean).join(":");
}
export function createError(driver, message, opts) {
  const err = new Error(`[unstorage] [${driver}] ${message}`, opts);
  if (Error.captureStackTrace) {
    Error.captureStackTrace(err, createError);
  }
  return err;
}
export function createRequiredError(driver, name) {
  if (Array.isArray(name)) {
    return createError(
      driver,
      `Missing some of the required options ${name.map((n) => "`" + n + "`").join(", ")}`
    );
  }
  return createError(driver, `Missing required option \`${name}\`.`);
}
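Both key helpers are pure string munging: normalizeKey collapses :, / and \ into a single separator and trims it from both ends, while joinKeys drops empty segments, which is why driver key prefixes compose cleanly. Some illustrative evaluations (using the exports above):

normalizeKey("/foo/bar/");     // "foo:bar"
normalizeKey("foo:bar", "/");  // "foo/bar"
joinKeys("base", "", "a/b");   // "base:a:b"
joinKeys("", "x");             // "x"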
78
node_modules/unstorage/drivers/utils/node-fs.cjs
generated
vendored
Normal file
78
node_modules/unstorage/drivers/utils/node-fs.cjs
generated
vendored
Normal file
@@ -0,0 +1,78 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ensuredir = ensuredir;
exports.readFile = readFile;
exports.readdir = readdir;
exports.readdirRecursive = readdirRecursive;
exports.rmRecursive = rmRecursive;
exports.stat = stat;
exports.unlink = unlink;
exports.writeFile = writeFile;
var _nodeFs = require("node:fs");
var _nodePath = require("node:path");
function ignoreNotfound(err) {
  return err.code === "ENOENT" || err.code === "EISDIR" ? null : err;
}
function ignoreExists(err) {
  return err.code === "EEXIST" ? null : err;
}
async function writeFile(path, data, encoding) {
  await ensuredir((0, _nodePath.dirname)(path));
  return _nodeFs.promises.writeFile(path, data, encoding);
}
function readFile(path, encoding) {
  return _nodeFs.promises.readFile(path, encoding).catch(ignoreNotfound);
}
function stat(path) {
  return _nodeFs.promises.stat(path).catch(ignoreNotfound);
}
function unlink(path) {
  return _nodeFs.promises.unlink(path).catch(ignoreNotfound);
}
function readdir(dir) {
  return _nodeFs.promises.readdir(dir, {
    withFileTypes: true
  }).catch(ignoreNotfound).then(r => r || []);
}
async function ensuredir(dir) {
  if ((0, _nodeFs.existsSync)(dir)) {
    return;
  }
  await ensuredir((0, _nodePath.dirname)(dir)).catch(ignoreExists);
  await _nodeFs.promises.mkdir(dir).catch(ignoreExists);
}
async function readdirRecursive(dir, ignore, maxDepth) {
  if (ignore && ignore(dir)) {
    return [];
  }
  const entries = await readdir(dir);
  const files = [];
  await Promise.all(entries.map(async entry => {
    const entryPath = (0, _nodePath.resolve)(dir, entry.name);
    if (entry.isDirectory()) {
      if (maxDepth === void 0 || maxDepth > 0) {
        const dirFiles = await readdirRecursive(entryPath, ignore, maxDepth === void 0 ? void 0 : maxDepth - 1);
        files.push(...dirFiles.map(f => entry.name + "/" + f));
      }
    } else {
      if (!(ignore && ignore(entry.name))) {
        files.push(entry.name);
      }
    }
  }));
  return files;
}
async function rmRecursive(dir) {
  const entries = await readdir(dir);
  await Promise.all(entries.map(entry => {
    const entryPath = (0, _nodePath.resolve)(dir, entry.name);
    if (entry.isDirectory()) {
      return rmRecursive(entryPath).then(() => _nodeFs.promises.rmdir(entryPath));
    } else {
      return _nodeFs.promises.unlink(entryPath);
    }
  }));
}
11
node_modules/unstorage/drivers/utils/node-fs.d.ts
generated
vendored
Normal file
11
node_modules/unstorage/drivers/utils/node-fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,11 @@
import { Dirent, promises as fsPromises } from "node:fs";
type WriteFileData = Parameters<typeof fsPromises.writeFile>[1];
export declare function writeFile(path: string, data: WriteFileData, encoding?: BufferEncoding): Promise<void>;
export declare function readFile(path: string, encoding?: BufferEncoding): Promise<any>;
export declare function stat(path: string): Promise<any>;
export declare function unlink(path: string): Promise<any>;
export declare function readdir(dir: string): Promise<Dirent[]>;
export declare function ensuredir(dir: string): Promise<void>;
export declare function readdirRecursive(dir: string, ignore?: (p: string) => boolean, maxDepth?: number): Promise<string[]>;
export declare function rmRecursive(dir: string): Promise<void>;
export {};
71
node_modules/unstorage/drivers/utils/node-fs.mjs
generated
vendored
Normal file
71
node_modules/unstorage/drivers/utils/node-fs.mjs
generated
vendored
Normal file
@@ -0,0 +1,71 @@
import { existsSync, promises as fsPromises } from "node:fs";
import { resolve, dirname } from "node:path";
function ignoreNotfound(err) {
  return err.code === "ENOENT" || err.code === "EISDIR" ? null : err;
}
function ignoreExists(err) {
  return err.code === "EEXIST" ? null : err;
}
export async function writeFile(path, data, encoding) {
  await ensuredir(dirname(path));
  return fsPromises.writeFile(path, data, encoding);
}
export function readFile(path, encoding) {
  return fsPromises.readFile(path, encoding).catch(ignoreNotfound);
}
export function stat(path) {
  return fsPromises.stat(path).catch(ignoreNotfound);
}
export function unlink(path) {
  return fsPromises.unlink(path).catch(ignoreNotfound);
}
export function readdir(dir) {
  return fsPromises.readdir(dir, { withFileTypes: true }).catch(ignoreNotfound).then((r) => r || []);
}
export async function ensuredir(dir) {
  if (existsSync(dir)) {
    return;
  }
  await ensuredir(dirname(dir)).catch(ignoreExists);
  await fsPromises.mkdir(dir).catch(ignoreExists);
}
export async function readdirRecursive(dir, ignore, maxDepth) {
  if (ignore && ignore(dir)) {
    return [];
  }
  const entries = await readdir(dir);
  const files = [];
  await Promise.all(
    entries.map(async (entry) => {
      const entryPath = resolve(dir, entry.name);
      if (entry.isDirectory()) {
        if (maxDepth === void 0 || maxDepth > 0) {
          const dirFiles = await readdirRecursive(
            entryPath,
            ignore,
            maxDepth === void 0 ? void 0 : maxDepth - 1
          );
          files.push(...dirFiles.map((f) => entry.name + "/" + f));
        }
      } else {
        if (!(ignore && ignore(entry.name))) {
          files.push(entry.name);
        }
      }
    })
  );
  return files;
}
export async function rmRecursive(dir) {
  const entries = await readdir(dir);
  await Promise.all(
    entries.map((entry) => {
      const entryPath = resolve(dir, entry.name);
      if (entry.isDirectory()) {
        return rmRecursive(entryPath).then(() => fsPromises.rmdir(entryPath));
      } else {
        return fsPromises.unlink(entryPath);
      }
    })
  );
}
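readdirRecursive returns slash-joined relative paths and prunes whole subtrees when the ignore predicate matches a directory. A small sketch (assumes Node 18+; the ignore rule and depth limit are illustrative, and the import path is shown relative to the drivers directory):

import { readdirRecursive } from "./utils/node-fs.mjs";

// List files up to two directory levels deep, skipping node_modules subtrees.
const files = await readdirRecursive(
  ".",
  (p) => p.includes("node_modules"),
  2
);
console.log(files); // e.g. ["package.json", "src/index.mjs", ...]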
117
node_modules/unstorage/drivers/vercel-blob.cjs
generated
vendored
Normal file
117
node_modules/unstorage/drivers/vercel-blob.cjs
generated
vendored
Normal file
@@ -0,0 +1,117 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _blob = require("@vercel/blob");
var _utils = require("./utils/index.cjs");
const DRIVER_NAME = "vercel-blob";
module.exports = (0, _utils.defineDriver)(opts => {
  const optsBase = (0, _utils.normalizeKey)(opts?.base);
  const r = (...keys) => (0, _utils.joinKeys)(optsBase, ...keys).replace(/:/g, "/");
  const envName = `${opts.envPrefix || "BLOB"}_READ_WRITE_TOKEN`;
  const getToken = () => {
    if (opts.access !== "public") {
      throw (0, _utils.createError)(DRIVER_NAME, `You must set { access: "public" }`);
    }
    const token = opts.token || globalThis.process?.env?.[envName];
    if (!token) {
      throw (0, _utils.createError)(DRIVER_NAME, `Missing token. Set ${envName} env or token config.`);
    }
    return token;
  };
  const get = async key => {
    const {
      blobs
    } = await (0, _blob.list)({
      token: getToken(),
      prefix: r(key)
    });
    const blob = blobs.find(item => item.pathname === r(key));
    return blob;
  };
  return {
    name: DRIVER_NAME,
    async hasItem(key) {
      const blob = await get(key);
      return !!blob;
    },
    async getItem(key) {
      const blob = await get(key);
      return blob ? fetch(blob.url).then(res => res.text()) : null;
    },
    async getItemRaw(key) {
      const blob = await get(key);
      return blob ? fetch(blob.url).then(res => res.arrayBuffer()) : null;
    },
    async getMeta(key) {
      const blob = await get(key);
      if (!blob) return null;
      const blobHead = await (0, _blob.head)(blob.url, {
        token: getToken()
      });
      if (!blobHead) return null;
      return {
        mtime: blobHead.uploadedAt,
        ...blobHead
      };
    },
    async setItem(key, value, opts2) {
      await (0, _blob.put)(r(key), value, {
        access: "public",
        addRandomSuffix: false,
        token: getToken(),
        ...opts2
      });
    },
    async setItemRaw(key, value, opts2) {
      await (0, _blob.put)(r(key), value, {
        access: "public",
        addRandomSuffix: false,
        token: getToken(),
        ...opts2
      });
    },
    async removeItem(key) {
      const blob = await get(key);
      if (blob) await (0, _blob.del)(blob.url, {
        token: getToken()
      });
    },
    async getKeys(base) {
      const blobs = [];
      let cursor = void 0;
      do {
        const listBlobResult = await (0, _blob.list)({
          token: getToken(),
          cursor,
          prefix: r(base)
        });
        cursor = listBlobResult.cursor;
        for (const blob of listBlobResult.blobs) {
          blobs.push(blob);
        }
      } while (cursor);
      return blobs.map(blob => blob.pathname.replace(new RegExp(`^${optsBase.replace(/:/g, "/")}/`), ""));
    },
    async clear(base) {
      let cursor = void 0;
      const blobs = [];
      do {
        const listBlobResult = await (0, _blob.list)({
          token: getToken(),
          cursor,
          prefix: r(base)
        });
        blobs.push(...listBlobResult.blobs);
        cursor = listBlobResult.cursor;
      } while (cursor);
      if (blobs.length > 0) {
        await (0, _blob.del)(blobs.map(blob => blob.url), {
          token: getToken()
        });
      }
    }
  };
});
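A sketch of wiring the driver up (assumes a Vercel deployment with a Blob store; as the implementation shows, the driver insists on access: "public" and falls back to the BLOB_READ_WRITE_TOKEN environment variable when no token is passed):

import { createStorage } from "unstorage";
import vercelBlobDriver from "unstorage/drivers/vercel-blob";

const storage = createStorage({
  driver: vercelBlobDriver({
    access: "public",   // mandatory; the driver throws otherwise
    base: "app"         // stored under the "app/" pathname prefix
  })
});

await storage.setItem("hello.txt", "hi");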
Some files were not shown because too many files have changed in this diff.