Migrate storage to DogeCloud and expand admin dashboard
This commit is contained in:
@@ -7,7 +7,7 @@ import {spawnSync} from 'node:child_process';
|
||||
|
||||
import {
|
||||
buildObjectKey,
|
||||
createAuthorizationHeader,
|
||||
createAwsV4Headers,
|
||||
encodeObjectKey,
|
||||
getFrontendSpaAliasContentType,
|
||||
getFrontendSpaAliasKeys,
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
listFiles,
|
||||
normalizeEndpoint,
|
||||
parseSimpleEnv,
|
||||
requestDogeCloudTemporaryS3Session,
|
||||
} from './oss-deploy-lib.mjs';
|
||||
|
||||
const repoRoot = process.cwd();
|
||||
@@ -72,34 +73,39 @@ function runBuild() {
|
||||
async function uploadFile({
|
||||
bucket,
|
||||
endpoint,
|
||||
region,
|
||||
objectKey,
|
||||
filePath,
|
||||
contentTypeOverride,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
}) {
|
||||
const body = await fs.readFile(filePath);
|
||||
const contentType = contentTypeOverride || getContentType(objectKey);
|
||||
const date = new Date().toUTCString();
|
||||
const amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||
const url = `https://${bucket}.${normalizeEndpoint(endpoint)}/${encodeObjectKey(objectKey)}`;
|
||||
const authorization = createAuthorizationHeader({
|
||||
const signatureHeaders = createAwsV4Headers({
|
||||
method: 'PUT',
|
||||
endpoint,
|
||||
region,
|
||||
bucket,
|
||||
objectKey,
|
||||
contentType,
|
||||
date,
|
||||
headers: {
|
||||
'Content-Type': contentType,
|
||||
},
|
||||
amzDate,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
});
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: authorization,
|
||||
...signatureHeaders,
|
||||
'Cache-Control': getCacheControl(objectKey),
|
||||
'Content-Length': String(body.byteLength),
|
||||
'Content-Type': contentType,
|
||||
Date: date,
|
||||
},
|
||||
body,
|
||||
});
|
||||
@@ -113,9 +119,11 @@ async function uploadFile({
|
||||
async function uploadSpaAliases({
|
||||
bucket,
|
||||
endpoint,
|
||||
region,
|
||||
distIndexPath,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
remotePrefix,
|
||||
dryRun,
|
||||
}) {
|
||||
@@ -133,11 +141,13 @@ async function uploadSpaAliases({
|
||||
await uploadFile({
|
||||
bucket,
|
||||
endpoint,
|
||||
region,
|
||||
objectKey,
|
||||
filePath: distIndexPath,
|
||||
contentTypeOverride: contentType,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
});
|
||||
console.log(`uploaded alias ${objectKey}`);
|
||||
}
|
||||
@@ -148,11 +158,26 @@ async function main() {
|
||||
|
||||
await loadEnvFileIfPresent();
|
||||
|
||||
const accessKeyId = requireEnv('YOYUZH_OSS_ACCESS_KEY_ID');
|
||||
const accessKeySecret = requireEnv('YOYUZH_OSS_ACCESS_KEY_SECRET');
|
||||
const endpoint = requireEnv('YOYUZH_OSS_ENDPOINT');
|
||||
const bucket = requireEnv('YOYUZH_OSS_BUCKET');
|
||||
const remotePrefix = process.env.YOYUZH_OSS_PREFIX || '';
|
||||
const apiAccessKey = requireEnv('YOYUZH_DOGECLOUD_API_ACCESS_KEY');
|
||||
const apiSecretKey = requireEnv('YOYUZH_DOGECLOUD_API_SECRET_KEY');
|
||||
const scope = requireEnv('YOYUZH_DOGECLOUD_FRONT_SCOPE');
|
||||
const apiBaseUrl = process.env.YOYUZH_DOGECLOUD_API_BASE_URL || 'https://api.dogecloud.com';
|
||||
const region = process.env.YOYUZH_DOGECLOUD_S3_REGION || 'automatic';
|
||||
const remotePrefix = process.env.YOYUZH_DOGECLOUD_FRONT_PREFIX || '';
|
||||
const ttlSeconds = Number(process.env.YOYUZH_DOGECLOUD_FRONT_TTL_SECONDS || '3600');
|
||||
const {
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
endpoint,
|
||||
bucket,
|
||||
} = await requestDogeCloudTemporaryS3Session({
|
||||
apiBaseUrl,
|
||||
accessKey: apiAccessKey,
|
||||
secretKey: apiSecretKey,
|
||||
scope,
|
||||
ttlSeconds,
|
||||
});
|
||||
|
||||
if (!skipBuild) {
|
||||
runBuild();
|
||||
@@ -175,10 +200,12 @@ async function main() {
|
||||
await uploadFile({
|
||||
bucket,
|
||||
endpoint,
|
||||
region,
|
||||
objectKey,
|
||||
filePath,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
});
|
||||
console.log(`uploaded ${objectKey}`);
|
||||
}
|
||||
@@ -186,9 +213,11 @@ async function main() {
|
||||
await uploadSpaAliases({
|
||||
bucket,
|
||||
endpoint,
|
||||
region,
|
||||
distIndexPath: path.join(distDir, 'index.html'),
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
remotePrefix,
|
||||
dryRun,
|
||||
});
|
||||
|
||||
402
scripts/migrate-aliyun-oss-to-s3.mjs
Normal file
402
scripts/migrate-aliyun-oss-to-s3.mjs
Normal file
@@ -0,0 +1,402 @@
|
||||
import crypto from 'node:crypto';
|
||||
import https from 'node:https';
|
||||
import {pathToFileURL} from 'node:url';
|
||||
|
||||
import {
|
||||
createAwsV4Headers,
|
||||
encodeObjectKey,
|
||||
normalizeEndpoint,
|
||||
requestDogeCloudTemporaryS3Session,
|
||||
} from './oss-deploy-lib.mjs';
|
||||
|
||||
const DEFAULTS = {
|
||||
sourceEndpoint: 'https://oss-ap-northeast-1.aliyuncs.com',
|
||||
targetEndpoint: 'https://cos.ap-chengdu.myqcloud.com',
|
||||
targetRegion: 'automatic',
|
||||
prefix: '',
|
||||
dryRun: false,
|
||||
overwrite: false,
|
||||
};
|
||||
|
||||
export function parseArgs(argv) {
|
||||
const options = {...DEFAULTS};
|
||||
|
||||
for (const arg of argv) {
|
||||
if (arg === '--dry-run') {
|
||||
options.dryRun = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg === '--overwrite') {
|
||||
options.overwrite = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--prefix=')) {
|
||||
options.prefix = arg.slice('--prefix='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--source-endpoint=')) {
|
||||
options.sourceEndpoint = arg.slice('--source-endpoint='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--source-bucket=')) {
|
||||
options.sourceBucket = arg.slice('--source-bucket='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--source-access-key-id=')) {
|
||||
options.sourceAccessKeyId = arg.slice('--source-access-key-id='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--source-access-key-secret=')) {
|
||||
options.sourceAccessKeySecret = arg.slice('--source-access-key-secret='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--target-api-base-url=')) {
|
||||
options.targetApiBaseUrl = arg.slice('--target-api-base-url='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--target-region=')) {
|
||||
options.targetRegion = arg.slice('--target-region='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--target-scope=')) {
|
||||
options.targetScope = arg.slice('--target-scope='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--target-api-access-key=')) {
|
||||
options.targetApiAccessKey = arg.slice('--target-api-access-key='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--target-api-secret-key=')) {
|
||||
options.targetApiSecretKey = arg.slice('--target-api-secret-key='.length);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (arg.startsWith('--target-ttl-seconds=')) {
|
||||
options.targetTtlSeconds = Number(arg.slice('--target-ttl-seconds='.length));
|
||||
continue;
|
||||
}
|
||||
|
||||
throw new Error(`Unknown argument: ${arg}`);
|
||||
}
|
||||
|
||||
return options;
|
||||
}
|
||||
|
||||
export function pickTransferredHeaders(sourceHeaders) {
|
||||
const forwardedHeaders = {};
|
||||
const supportedHeaders = [
|
||||
'cache-control',
|
||||
'content-disposition',
|
||||
'content-encoding',
|
||||
'content-type',
|
||||
];
|
||||
|
||||
for (const headerName of supportedHeaders) {
|
||||
const value = sourceHeaders[headerName];
|
||||
if (typeof value === 'string' && value) {
|
||||
forwardedHeaders[headerName === 'content-type' ? 'Content-Type' : headerName] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return forwardedHeaders;
|
||||
}
|
||||
|
||||
function requireOption(options, key) {
|
||||
const value = options[key];
|
||||
if (!value) {
|
||||
throw new Error(`Missing required option: ${key}`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function createOssAuthorizationHeader({
|
||||
method,
|
||||
bucket,
|
||||
objectKey,
|
||||
contentType,
|
||||
date,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
}) {
|
||||
const stringToSign = [
|
||||
method.toUpperCase(),
|
||||
'',
|
||||
contentType,
|
||||
date,
|
||||
`/${bucket}/${objectKey}`,
|
||||
].join('\n');
|
||||
|
||||
const signature = crypto
|
||||
.createHmac('sha1', accessKeySecret)
|
||||
.update(stringToSign)
|
||||
.digest('base64');
|
||||
return `OSS ${accessKeyId}:${signature}`;
|
||||
}
|
||||
|
||||
function sourceRequest({method, endpoint, bucket, objectKey, accessKeyId, accessKeySecret, query = ''}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const normalizedEndpoint = normalizeEndpoint(endpoint);
|
||||
const date = new Date().toUTCString();
|
||||
const authorization = createOssAuthorizationHeader({
|
||||
method,
|
||||
bucket,
|
||||
objectKey,
|
||||
contentType: '',
|
||||
date,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
});
|
||||
|
||||
const request = https.request({
|
||||
hostname: `${bucket}.${normalizedEndpoint}`,
|
||||
path: `/${encodeObjectKey(objectKey)}${query ? `?${query}` : ''}`,
|
||||
method,
|
||||
headers: {
|
||||
Authorization: authorization,
|
||||
Date: date,
|
||||
},
|
||||
}, (response) => {
|
||||
const chunks = [];
|
||||
response.on('data', (chunk) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
response.on('end', () => {
|
||||
resolve({
|
||||
statusCode: response.statusCode ?? 500,
|
||||
headers: response.headers,
|
||||
body: Buffer.concat(chunks),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
request.on('error', reject);
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
|
||||
function targetRequest({
|
||||
method,
|
||||
endpoint,
|
||||
region,
|
||||
bucket,
|
||||
objectKey,
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
query = '',
|
||||
headers = {},
|
||||
body,
|
||||
}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const normalizedEndpoint = normalizeEndpoint(endpoint);
|
||||
const amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||
const signedHeaders = createAwsV4Headers({
|
||||
method,
|
||||
endpoint,
|
||||
region,
|
||||
bucket,
|
||||
objectKey,
|
||||
query,
|
||||
headers,
|
||||
amzDate,
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
});
|
||||
|
||||
const request = https.request({
|
||||
hostname: `${bucket}.${normalizedEndpoint}`,
|
||||
path: `/${encodeObjectKey(objectKey)}${query ? `?${query}` : ''}`,
|
||||
method,
|
||||
headers: signedHeaders,
|
||||
}, (response) => {
|
||||
const chunks = [];
|
||||
response.on('data', (chunk) => {
|
||||
chunks.push(chunk);
|
||||
});
|
||||
response.on('end', () => {
|
||||
resolve({
|
||||
statusCode: response.statusCode ?? 500,
|
||||
headers: response.headers,
|
||||
body: Buffer.concat(chunks),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
request.on('error', reject);
|
||||
if (body) {
|
||||
request.end(body);
|
||||
return;
|
||||
}
|
||||
request.end();
|
||||
});
|
||||
}
|
||||
|
||||
function extractXmlValues(xmlBuffer, tagName) {
|
||||
const xml = xmlBuffer.toString('utf8');
|
||||
const pattern = new RegExp(`<${tagName}>(.*?)</${tagName}>`, 'g');
|
||||
return [...xml.matchAll(pattern)].map((match) => match[1]);
|
||||
}
|
||||
|
||||
async function listSourceObjects(context, prefix) {
|
||||
const keys = [];
|
||||
let continuationToken = '';
|
||||
|
||||
while (true) {
|
||||
const query = new URLSearchParams({
|
||||
'list-type': '2',
|
||||
'max-keys': '1000',
|
||||
prefix,
|
||||
});
|
||||
if (continuationToken) {
|
||||
query.set('continuation-token', continuationToken);
|
||||
}
|
||||
|
||||
const response = await sourceRequest({
|
||||
...context,
|
||||
method: 'GET',
|
||||
objectKey: '',
|
||||
query: query.toString(),
|
||||
});
|
||||
if (response.statusCode < 200 || response.statusCode >= 300) {
|
||||
throw new Error(`List failed for prefix "${prefix}": ${response.statusCode} ${response.body.toString('utf8')}`);
|
||||
}
|
||||
|
||||
keys.push(...extractXmlValues(response.body, 'Key'));
|
||||
const truncated = extractXmlValues(response.body, 'IsTruncated')[0] === 'true';
|
||||
continuationToken = extractXmlValues(response.body, 'NextContinuationToken')[0] || '';
|
||||
if (!truncated || !continuationToken) {
|
||||
return keys;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function targetObjectExists(context, objectKey) {
|
||||
const response = await targetRequest({
|
||||
...context,
|
||||
method: 'HEAD',
|
||||
objectKey,
|
||||
});
|
||||
return response.statusCode >= 200 && response.statusCode < 300;
|
||||
}
|
||||
|
||||
async function copyObject(context, objectKey) {
|
||||
const sourceResponse = await sourceRequest({
|
||||
endpoint: context.sourceEndpoint,
|
||||
bucket: context.sourceBucket,
|
||||
accessKeyId: context.sourceAccessKeyId,
|
||||
accessKeySecret: context.sourceAccessKeySecret,
|
||||
method: 'GET',
|
||||
objectKey,
|
||||
});
|
||||
if (sourceResponse.statusCode < 200 || sourceResponse.statusCode >= 300) {
|
||||
throw new Error(`Download failed for ${objectKey}: ${sourceResponse.statusCode} ${sourceResponse.body.toString('utf8')}`);
|
||||
}
|
||||
|
||||
const forwardedHeaders = pickTransferredHeaders(sourceResponse.headers);
|
||||
const response = await targetRequest({
|
||||
endpoint: context.targetEndpoint,
|
||||
region: context.targetRegion,
|
||||
bucket: context.targetBucket,
|
||||
accessKeyId: context.targetAccessKeyId,
|
||||
secretAccessKey: context.targetSecretAccessKey,
|
||||
sessionToken: context.targetSessionToken,
|
||||
method: 'PUT',
|
||||
objectKey,
|
||||
headers: {
|
||||
...forwardedHeaders,
|
||||
'Content-Length': String(sourceResponse.body.byteLength),
|
||||
},
|
||||
body: sourceResponse.body,
|
||||
});
|
||||
if (response.statusCode < 200 || response.statusCode >= 300) {
|
||||
throw new Error(`Upload failed for ${objectKey}: ${response.statusCode} ${response.body.toString('utf8')}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const options = parseArgs(process.argv.slice(2));
|
||||
const targetSession = await requestDogeCloudTemporaryS3Session({
|
||||
apiBaseUrl: options.targetApiBaseUrl || 'https://api.dogecloud.com',
|
||||
accessKey: requireOption(options, 'targetApiAccessKey'),
|
||||
secretKey: requireOption(options, 'targetApiSecretKey'),
|
||||
scope: requireOption(options, 'targetScope'),
|
||||
ttlSeconds: options.targetTtlSeconds || 3600,
|
||||
});
|
||||
const context = {
|
||||
sourceEndpoint: options.sourceEndpoint,
|
||||
sourceBucket: requireOption(options, 'sourceBucket'),
|
||||
sourceAccessKeyId: requireOption(options, 'sourceAccessKeyId'),
|
||||
sourceAccessKeySecret: requireOption(options, 'sourceAccessKeySecret'),
|
||||
targetEndpoint: targetSession.endpoint,
|
||||
targetRegion: options.targetRegion,
|
||||
targetBucket: targetSession.bucket,
|
||||
targetAccessKeyId: targetSession.accessKeyId,
|
||||
targetSecretAccessKey: targetSession.secretAccessKey,
|
||||
targetSessionToken: targetSession.sessionToken,
|
||||
};
|
||||
|
||||
const keys = await listSourceObjects({
|
||||
endpoint: context.sourceEndpoint,
|
||||
bucket: context.sourceBucket,
|
||||
accessKeyId: context.sourceAccessKeyId,
|
||||
accessKeySecret: context.sourceAccessKeySecret,
|
||||
}, options.prefix);
|
||||
|
||||
const summary = {
|
||||
listed: keys.length,
|
||||
copied: 0,
|
||||
skippedExisting: 0,
|
||||
failed: 0,
|
||||
};
|
||||
|
||||
for (const objectKey of keys) {
|
||||
if (!options.overwrite && await targetObjectExists({
|
||||
endpoint: context.targetEndpoint,
|
||||
region: context.targetRegion,
|
||||
bucket: context.targetBucket,
|
||||
accessKeyId: context.targetAccessKeyId,
|
||||
secretAccessKey: context.targetSecretAccessKey,
|
||||
sessionToken: context.targetSessionToken,
|
||||
}, objectKey)) {
|
||||
summary.skippedExisting += 1;
|
||||
console.log(`[skip] ${objectKey}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(`${options.dryRun ? '[dry-run]' : '[copy]'} ${objectKey}`);
|
||||
if (options.dryRun) {
|
||||
summary.copied += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
await copyObject(context, objectKey);
|
||||
summary.copied += 1;
|
||||
} catch (error) {
|
||||
summary.failed += 1;
|
||||
console.error(`[failed] ${objectKey}: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\nSummary');
|
||||
console.log(JSON.stringify(summary, null, 2));
|
||||
}
|
||||
|
||||
if (import.meta.url === pathToFileURL(process.argv[1]).href) {
|
||||
main().catch((error) => {
|
||||
console.error(error instanceof Error ? error.message : String(error));
|
||||
process.exitCode = 1;
|
||||
});
|
||||
}
|
||||
38
scripts/migrate-aliyun-oss-to-s3.test.mjs
Normal file
38
scripts/migrate-aliyun-oss-to-s3.test.mjs
Normal file
@@ -0,0 +1,38 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import {parseArgs, pickTransferredHeaders} from './migrate-aliyun-oss-to-s3.mjs';
|
||||
|
||||
test('parseArgs keeps migration flags and bucket options', () => {
|
||||
const options = parseArgs([
|
||||
'--dry-run',
|
||||
'--overwrite',
|
||||
'--prefix=games/',
|
||||
'--source-bucket=aliyun-front',
|
||||
'--target-scope=yoyuzh-front',
|
||||
'--target-api-access-key=doge-ak',
|
||||
]);
|
||||
|
||||
assert.equal(options.dryRun, true);
|
||||
assert.equal(options.overwrite, true);
|
||||
assert.equal(options.prefix, 'games/');
|
||||
assert.equal(options.sourceBucket, 'aliyun-front');
|
||||
assert.equal(options.targetScope, 'yoyuzh-front');
|
||||
assert.equal(options.targetApiAccessKey, 'doge-ak');
|
||||
});
|
||||
|
||||
test('pickTransferredHeaders preserves only safe object metadata headers', () => {
|
||||
const headers = pickTransferredHeaders({
|
||||
'cache-control': 'public,max-age=31536000,immutable',
|
||||
'content-type': 'text/javascript; charset=utf-8',
|
||||
'content-disposition': 'attachment; filename=test.js',
|
||||
etag: '"demo"',
|
||||
server: 'OSS',
|
||||
});
|
||||
|
||||
assert.deepEqual(headers, {
|
||||
'cache-control': 'public,max-age=31536000,immutable',
|
||||
'Content-Type': 'text/javascript; charset=utf-8',
|
||||
'content-disposition': 'attachment; filename=test.js',
|
||||
});
|
||||
});
|
||||
@@ -3,12 +3,13 @@ import {constants as fsConstants} from 'node:fs';
|
||||
import {spawn} from 'node:child_process';
|
||||
import https from 'node:https';
|
||||
import path from 'node:path';
|
||||
import crypto from 'node:crypto';
|
||||
|
||||
import {
|
||||
createAwsV4Headers,
|
||||
normalizeEndpoint,
|
||||
parseSimpleEnv,
|
||||
encodeObjectKey,
|
||||
requestDogeCloudTemporaryS3Session,
|
||||
} from './oss-deploy-lib.mjs';
|
||||
|
||||
const DEFAULTS = {
|
||||
@@ -16,7 +17,8 @@ const DEFAULTS = {
|
||||
storageRoot: '/opt/yoyuzh/storage',
|
||||
database: 'yoyuzh_portal',
|
||||
bucket: 'yoyuzh-files',
|
||||
endpoint: 'https://oss-ap-northeast-1.aliyuncs.com',
|
||||
endpoint: 'https://cos.ap-chengdu.myqcloud.com',
|
||||
region: 'automatic',
|
||||
};
|
||||
|
||||
function parseArgs(argv) {
|
||||
@@ -114,37 +116,6 @@ function runCommand(command, args) {
|
||||
});
|
||||
}
|
||||
|
||||
function createOssAuthorizationHeader({
|
||||
method,
|
||||
bucket,
|
||||
objectKey,
|
||||
contentType,
|
||||
date,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
headers = {},
|
||||
}) {
|
||||
const canonicalizedHeaders = Object.entries(headers)
|
||||
.map(([key, value]) => [key.toLowerCase().trim(), String(value).trim()])
|
||||
.filter(([key]) => key.startsWith('x-oss-'))
|
||||
.sort(([left], [right]) => left.localeCompare(right))
|
||||
.map(([key, value]) => `${key}:${value}\n`)
|
||||
.join('');
|
||||
const canonicalizedResource = `/${bucket}/${objectKey}`;
|
||||
const stringToSign = [
|
||||
method.toUpperCase(),
|
||||
'',
|
||||
contentType,
|
||||
date,
|
||||
`${canonicalizedHeaders}${canonicalizedResource}`,
|
||||
].join('\n');
|
||||
const signature = crypto
|
||||
.createHmac('sha1', accessKeySecret)
|
||||
.update(stringToSign)
|
||||
.digest('base64');
|
||||
return `OSS ${accessKeyId}:${signature}`;
|
||||
}
|
||||
|
||||
async function readAppEnv(appEnvPath) {
|
||||
const raw = await fs.readFile(appEnvPath, 'utf8');
|
||||
return parseSimpleEnv(raw);
|
||||
@@ -178,20 +149,34 @@ async function queryFiles(database) {
|
||||
});
|
||||
}
|
||||
|
||||
function ossRequest({method, endpoint, bucket, objectKey, accessKeyId, accessKeySecret, headers = {}, query = '', body}) {
|
||||
function s3Request({
|
||||
method,
|
||||
endpoint,
|
||||
region,
|
||||
bucket,
|
||||
objectKey,
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
headers = {},
|
||||
query = '',
|
||||
body,
|
||||
}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const normalizedEndpoint = normalizeEndpoint(endpoint);
|
||||
const date = new Date().toUTCString();
|
||||
const contentType = headers['Content-Type'] || headers['content-type'] || '';
|
||||
const auth = createOssAuthorizationHeader({
|
||||
const amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||
const signatureHeaders = createAwsV4Headers({
|
||||
method,
|
||||
endpoint,
|
||||
region,
|
||||
bucket,
|
||||
objectKey,
|
||||
contentType,
|
||||
date,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
query,
|
||||
headers,
|
||||
amzDate,
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
});
|
||||
|
||||
const request = https.request({
|
||||
@@ -199,9 +184,7 @@ function ossRequest({method, endpoint, bucket, objectKey, accessKeyId, accessKey
|
||||
path: `/${encodeObjectKey(objectKey)}${query ? `?${query}` : ''}`,
|
||||
method,
|
||||
headers: {
|
||||
Date: date,
|
||||
Authorization: auth,
|
||||
...headers,
|
||||
...signatureHeaders,
|
||||
},
|
||||
}, (response) => {
|
||||
let data = '';
|
||||
@@ -229,7 +212,7 @@ function ossRequest({method, endpoint, bucket, objectKey, accessKeyId, accessKey
|
||||
}
|
||||
|
||||
async function objectExists(context, objectKey) {
|
||||
const response = await ossRequest({
|
||||
const response = await s3Request({
|
||||
...context,
|
||||
method: 'HEAD',
|
||||
objectKey,
|
||||
@@ -243,7 +226,7 @@ async function uploadLocalFile(context, objectKey, absolutePath, contentType = '
|
||||
const stat = await fileHandle.stat();
|
||||
|
||||
try {
|
||||
const response = await ossRequest({
|
||||
const response = await s3Request({
|
||||
...context,
|
||||
method: 'PUT',
|
||||
objectKey,
|
||||
@@ -263,12 +246,12 @@ async function uploadLocalFile(context, objectKey, absolutePath, contentType = '
|
||||
}
|
||||
|
||||
async function copyObject(context, sourceKey, targetKey) {
|
||||
const response = await ossRequest({
|
||||
const response = await s3Request({
|
||||
...context,
|
||||
method: 'PUT',
|
||||
objectKey: targetKey,
|
||||
headers: {
|
||||
'x-oss-copy-source': `/${context.bucket}/${encodeObjectKey(sourceKey)}`,
|
||||
'x-amz-copy-source': `/${context.bucket}/${encodeObjectKey(sourceKey)}`,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -278,7 +261,7 @@ async function copyObject(context, sourceKey, targetKey) {
|
||||
}
|
||||
|
||||
async function deleteObject(context, objectKey) {
|
||||
const response = await ossRequest({
|
||||
const response = await s3Request({
|
||||
...context,
|
||||
method: 'DELETE',
|
||||
objectKey,
|
||||
@@ -318,7 +301,7 @@ async function listObjects(context, prefix) {
|
||||
query.set('continuation-token', continuationToken);
|
||||
}
|
||||
|
||||
const response = await ossRequest({
|
||||
const response = await s3Request({
|
||||
...context,
|
||||
method: 'GET',
|
||||
objectKey: '',
|
||||
@@ -370,21 +353,38 @@ async function buildArchivedObjectMap(context, files) {
|
||||
async function main() {
|
||||
const options = parseArgs(process.argv.slice(2));
|
||||
const appEnv = await readAppEnv(options.appEnvPath);
|
||||
const endpoint = appEnv.YOYUZH_OSS_ENDPOINT || DEFAULTS.endpoint;
|
||||
const bucket = options.bucket;
|
||||
const accessKeyId = appEnv.YOYUZH_OSS_ACCESS_KEY_ID;
|
||||
const accessKeySecret = appEnv.YOYUZH_OSS_ACCESS_KEY_SECRET;
|
||||
const apiAccessKey = appEnv.YOYUZH_DOGECLOUD_API_ACCESS_KEY;
|
||||
const apiSecretKey = appEnv.YOYUZH_DOGECLOUD_API_SECRET_KEY;
|
||||
const scope = appEnv.YOYUZH_DOGECLOUD_STORAGE_SCOPE || options.bucket;
|
||||
const apiBaseUrl = appEnv.YOYUZH_DOGECLOUD_API_BASE_URL || 'https://api.dogecloud.com';
|
||||
const region = appEnv.YOYUZH_DOGECLOUD_S3_REGION || DEFAULTS.region;
|
||||
|
||||
if (!accessKeyId || !accessKeySecret) {
|
||||
throw new Error('Missing OSS credentials in app env');
|
||||
if (!apiAccessKey || !apiSecretKey || !scope) {
|
||||
throw new Error('Missing DogeCloud storage configuration in app env');
|
||||
}
|
||||
|
||||
const {
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
endpoint,
|
||||
bucket,
|
||||
} = await requestDogeCloudTemporaryS3Session({
|
||||
apiBaseUrl,
|
||||
accessKey: apiAccessKey,
|
||||
secretKey: apiSecretKey,
|
||||
scope,
|
||||
ttlSeconds: Number(appEnv.YOYUZH_DOGECLOUD_STORAGE_TTL_SECONDS || '3600'),
|
||||
});
|
||||
|
||||
const files = await queryFiles(options.database);
|
||||
const context = {
|
||||
endpoint,
|
||||
region,
|
||||
bucket,
|
||||
accessKeyId,
|
||||
accessKeySecret,
|
||||
secretAccessKey,
|
||||
sessionToken,
|
||||
};
|
||||
const archivedObjectsByKey = await buildArchivedObjectMap(context, files);
|
||||
|
||||
|
||||
@@ -68,29 +68,179 @@ export function getFrontendSpaAliasContentType() {
|
||||
return 'text/html; charset=utf-8';
|
||||
}
|
||||
|
||||
export function createAuthorizationHeader({
|
||||
function toAmzDateParts(amzDate) {
|
||||
return {
|
||||
amzDate,
|
||||
dateStamp: amzDate.slice(0, 8),
|
||||
};
|
||||
}
|
||||
|
||||
function sha256Hex(value) {
|
||||
return crypto.createHash('sha256').update(value).digest('hex');
|
||||
}
|
||||
|
||||
function hmac(key, value, encoding) {
|
||||
const digest = crypto.createHmac('sha256', key).update(value).digest();
|
||||
return encoding ? digest.toString(encoding) : digest;
|
||||
}
|
||||
|
||||
function hmacSha1Hex(key, value) {
|
||||
return crypto.createHmac('sha1', key).update(value).digest('hex');
|
||||
}
|
||||
|
||||
function buildSigningKey(secretAccessKey, dateStamp, region, service) {
|
||||
const kDate = hmac(`AWS4${secretAccessKey}`, dateStamp);
|
||||
const kRegion = hmac(kDate, region);
|
||||
const kService = hmac(kRegion, service);
|
||||
return hmac(kService, 'aws4_request');
|
||||
}
|
||||
|
||||
function encodeQueryComponent(value) {
|
||||
return encodeURIComponent(value).replace(/[!'()*]/g, (character) =>
|
||||
`%${character.charCodeAt(0).toString(16).toUpperCase()}`
|
||||
);
|
||||
}
|
||||
|
||||
function toCanonicalQueryString(query) {
|
||||
const params = new URLSearchParams(query);
|
||||
return [...params.entries()]
|
||||
.sort(([leftKey, leftValue], [rightKey, rightValue]) =>
|
||||
leftKey === rightKey ? leftValue.localeCompare(rightValue) : leftKey.localeCompare(rightKey)
|
||||
)
|
||||
.map(([key, value]) => `${encodeQueryComponent(key)}=${encodeQueryComponent(value)}`)
|
||||
.join('&');
|
||||
}
|
||||
|
||||
export function extractDogeCloudScopeBucketName(scope) {
|
||||
const separatorIndex = scope.indexOf(':');
|
||||
return separatorIndex >= 0 ? scope.slice(0, separatorIndex) : scope;
|
||||
}
|
||||
|
||||
export function createDogeCloudApiAuthorization({apiPath, body, accessKey, secretKey}) {
|
||||
return `TOKEN ${accessKey}:${hmacSha1Hex(secretKey, `${apiPath}\n${body}`)}`;
|
||||
}
|
||||
|
||||
export async function requestDogeCloudTemporaryS3Session({
|
||||
apiBaseUrl = 'https://api.dogecloud.com',
|
||||
accessKey,
|
||||
secretKey,
|
||||
scope,
|
||||
ttlSeconds = 3600,
|
||||
fetchImpl = fetch,
|
||||
}) {
|
||||
const apiPath = '/auth/tmp_token.json';
|
||||
const body = JSON.stringify({
|
||||
channel: 'OSS_FULL',
|
||||
ttl: ttlSeconds,
|
||||
scopes: [scope],
|
||||
});
|
||||
const response = await fetchImpl(`${apiBaseUrl.replace(/\/+$/, '')}${apiPath}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: createDogeCloudApiAuthorization({
|
||||
apiPath,
|
||||
body,
|
||||
accessKey,
|
||||
secretKey,
|
||||
}),
|
||||
},
|
||||
body,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`DogeCloud tmp_token request failed: HTTP ${response.status}`);
|
||||
}
|
||||
|
||||
const payload = await response.json();
|
||||
if (payload.code !== 200) {
|
||||
throw new Error(`DogeCloud tmp_token request failed: ${payload.msg || 'unknown error'}`);
|
||||
}
|
||||
|
||||
const bucketName = extractDogeCloudScopeBucketName(scope);
|
||||
const buckets = Array.isArray(payload.data?.Buckets) ? payload.data.Buckets : [];
|
||||
const bucket = buckets.find((entry) => entry.name === bucketName) ?? buckets[0];
|
||||
if (!bucket) {
|
||||
throw new Error(`DogeCloud tmp_token response did not include a bucket for scope: ${bucketName}`);
|
||||
}
|
||||
|
||||
return {
|
||||
accessKeyId: payload.data.Credentials?.accessKeyId || '',
|
||||
secretAccessKey: payload.data.Credentials?.secretAccessKey || '',
|
||||
sessionToken: payload.data.Credentials?.sessionToken || '',
|
||||
bucket: bucket.s3Bucket,
|
||||
endpoint: bucket.s3Endpoint,
|
||||
bucketName: bucket.name,
|
||||
expiresAt: payload.data.ExpiredAt,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Build the headers for an AWS Signature Version 4 (SigV4) signed request
 * against an S3-compatible endpoint (DogeCloud / Tencent COS uploads).
 *
 * The body is never read by the signer: the payload hash is the literal
 * `UNSIGNED-PAYLOAD`, which S3-compatible services accept for signed uploads.
 *
 * @param {object} options
 * @param {string} options.method - HTTP verb, e.g. 'PUT'; upper-cased for signing.
 * @param {string} options.endpoint - S3 endpoint; scheme/trailing slashes are normalized.
 * @param {string} options.bucket - Bucket name, used as the virtual-host prefix.
 * @param {string} options.objectKey - Unencoded object key.
 * @param {string} [options.contentType] - Legacy parameter from the old OSS signer;
 *   ignored here. Pass `headers['Content-Type']` to have it signed. Kept for
 *   backward compatibility with existing call sites.
 * @param {string} [options.date] - Legacy RFC 1123 date from the old OSS signer;
 *   ignored (SigV4 uses `amzDate`). Kept for backward compatibility.
 * @param {string} [options.query=''] - Raw query string to canonicalize.
 * @param {object} [options.headers={}] - Extra headers to sign and include in the result.
 * @param {string} [options.amzDate] - ISO basic timestamp (YYYYMMDD'T'HHMMSS'Z');
 *   defaults to the current time.
 * @param {string} options.accessKeyId - Credential id placed in the Authorization scope.
 * @param {string} [options.accessKeySecret] - Legacy alias of `secretAccessKey`;
 *   ignored. Kept for backward compatibility.
 * @param {string} options.secretAccessKey - Secret used to derive the SigV4 signing key.
 * @param {string} [options.sessionToken] - STS session token; signed and emitted
 *   as `x-amz-security-token` when present.
 * @param {string} [options.region='automatic'] - Region component of the credential scope.
 * @returns {Record<string, string>} Headers (Authorization, x-amz-*) to merge
 *   into the outgoing request.
 */
export function createAwsV4Headers({
  method,
  endpoint,
  bucket,
  objectKey,
  contentType,
  date,
  query = '',
  headers: extraHeaders = {},
  amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, ''),
  accessKeyId,
  accessKeySecret,
  secretAccessKey,
  sessionToken,
  region = 'automatic',
}) {
  const {dateStamp} = toAmzDateParts(amzDate);
  const normalizedEndpoint = normalizeEndpoint(endpoint);
  const host = `${bucket}.${normalizedEndpoint}`;
  const canonicalUri = `/${encodeObjectKey(objectKey)}`;
  const canonicalQueryString = toCanonicalQueryString(query);
  // The body is not hashed; S3-compatible services accept this sentinel.
  const payloadHash = 'UNSIGNED-PAYLOAD';
  const service = 's3';
  const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`;

  // Headers participating in the signature: host + the mandatory x-amz pair,
  // plus every caller-supplied header (lower-cased, value trimmed, per SigV4).
  const signedHeaderEntries = [
    ['host', host],
    ['x-amz-content-sha256', payloadHash],
    ['x-amz-date', amzDate],
  ];

  for (const [key, value] of Object.entries(extraHeaders)) {
    signedHeaderEntries.push([key.toLowerCase(), String(value).trim()]);
  }

  if (sessionToken) {
    signedHeaderEntries.push(['x-amz-security-token', sessionToken]);
  }

  // SigV4 requires the canonical headers in ascending (code point) order.
  signedHeaderEntries.sort(([left], [right]) => left.localeCompare(right));
  const signedHeaders = signedHeaderEntries.map(([key]) => key).join(';');
  const canonicalHeadersText = signedHeaderEntries.map(([key, value]) => `${key}:${value}\n`).join('');

  const canonicalRequest = [
    method.toUpperCase(),
    canonicalUri,
    canonicalQueryString,
    canonicalHeadersText,
    signedHeaders,
    payloadHash,
  ].join('\n');

  const stringToSign = [
    'AWS4-HMAC-SHA256',
    amzDate,
    credentialScope,
    sha256Hex(canonicalRequest),
  ].join('\n');

  const signature = hmac(buildSigningKey(secretAccessKey, dateStamp, region, service), stringToSign, 'hex');

  const resultHeaders = {
    Authorization: `AWS4-HMAC-SHA256 Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`,
    'x-amz-content-sha256': payloadHash,
    'x-amz-date': amzDate,
    ...extraHeaders,
  };

  if (sessionToken) {
    resultHeaders['x-amz-security-token'] = sessionToken;
  }

  return resultHeaders;
}
|
||||
|
||||
export function encodeObjectKey(objectKey) {
|
||||
|
||||
@@ -3,12 +3,15 @@ import test from 'node:test';
|
||||
|
||||
import {
|
||||
buildObjectKey,
|
||||
|
||||
createDogeCloudApiAuthorization,
|
||||
createAwsV4Headers,
|
||||
extractDogeCloudScopeBucketName,
|
||||
getFrontendSpaAliasContentType,
|
||||
getFrontendSpaAliasKeys,
|
||||
getCacheControl,
|
||||
getContentType,
|
||||
normalizeEndpoint,
|
||||
requestDogeCloudTemporaryS3Session,
|
||||
} from './oss-deploy-lib.mjs';
|
||||
|
||||
test('normalizeEndpoint strips scheme and trailing slashes', () => {
|
||||
@@ -43,16 +46,98 @@ test('frontend spa aliases are uploaded as html entry points', () => {
|
||||
assert.equal(getFrontendSpaAliasContentType(), 'text/html; charset=utf-8');
|
||||
});
|
||||
|
||||
// Pins the observable SigV4 outputs (payload-hash sentinel, x-amz-date echo,
// and the Authorization credential scope) for a fully fixed input.
test('createAwsV4Headers signs uploads with S3-compatible SigV4 headers', () => {
  const headers = createAwsV4Headers({
    method: 'PUT',
    endpoint: 'https://cos.ap-chengdu.myqcloud.com',
    bucket: 'demo-bucket',
    objectKey: 'assets/index.js',
    contentType: 'text/javascript; charset=utf-8',
    date: 'Tue, 17 Mar 2026 12:00:00 GMT',
    amzDate: '20260317T120000Z',
    accessKeyId: 'test-id',
    accessKeySecret: 'test-secret',
    secretAccessKey: 'test-secret',
    region: 'automatic',
  });

  assert.equal(headers['x-amz-content-sha256'], 'UNSIGNED-PAYLOAD');
  assert.equal(headers['x-amz-date'], '20260317T120000Z');
  assert.ok(headers.Authorization.startsWith('AWS4-HMAC-SHA256 Credential=test-id/20260317/automatic/s3/aws4_request'));
});
|
||||
|
||||
// A scope may carry a key-prefix restriction after ':'; only the bucket part
// before it identifies the logical bucket.
test('extractDogeCloudScopeBucketName keeps only the logical bucket name', () => {
  const cases = [
    ['yoyuzh-files:users/*', 'yoyuzh-files'],
    ['yoyuzh-front', 'yoyuzh-front'],
  ];
  for (const [scope, expectedName] of cases) {
    assert.equal(extractDogeCloudScopeBucketName(scope), expectedName);
  }
});
|
||||
|
||||
// Pins the exact TOKEN authorization string for a fixed key pair, path, and body.
test('createDogeCloudApiAuthorization signs body with HMAC-SHA1 hex', () => {
  const expected = 'TOKEN doge-ak:2cf0cf7cf6ddaf673cfe47e55646779d44470929';
  const actual = createDogeCloudApiAuthorization({
    apiPath: '/auth/tmp_token.json',
    body: '{"channel":"OSS_FULL","ttl":1800,"scopes":["yoyuzh-files"]}',
    accessKey: 'doge-ak',
    secretKey: 'doge-sk',
  });
  assert.equal(actual, expected);
});
|
||||
|
||||
// Exercises the temp-credential flow end to end against a stubbed fetch:
// verifies the outgoing tmp_token request shape and that the session picks the
// bucket whose name matches the requested scope.
test('requestDogeCloudTemporaryS3Session requests temp credentials and returns matching bucket', async () => {
  const seenRequests = [];
  // Canned DogeCloud tmp_token response with two buckets; only 'yoyuzh-front'
  // matches the scope below.
  const apiPayload = {
    code: 200,
    msg: 'OK',
    data: {
      Credentials: {
        accessKeyId: 'tmp-ak',
        secretAccessKey: 'tmp-sk',
        sessionToken: 'tmp-token',
      },
      ExpiredAt: 1777777777,
      Buckets: [
        {
          name: 'yoyuzh-files',
          s3Bucket: 's-cd-14873-yoyuzh-files-1258813047',
          s3Endpoint: 'https://cos.ap-chengdu.myqcloud.com',
        },
        {
          name: 'yoyuzh-front',
          s3Bucket: 's-cd-14873-yoyuzh-front-1258813047',
          s3Endpoint: 'https://cos.ap-chengdu.myqcloud.com',
        },
      ],
    },
  };

  const session = await requestDogeCloudTemporaryS3Session({
    apiBaseUrl: 'https://api.dogecloud.com',
    accessKey: 'doge-ak',
    secretKey: 'doge-sk',
    scope: 'yoyuzh-front:assets/*',
    ttlSeconds: 1200,
    fetchImpl: async (url, options) => {
      seenRequests.push({url, options});
      return {
        ok: true,
        status: 200,
        async json() {
          return apiPayload;
        },
      };
    },
  });

  const [firstRequest] = seenRequests;
  assert.equal(firstRequest.url, 'https://api.dogecloud.com/auth/tmp_token.json');
  assert.equal(firstRequest.options.method, 'POST');
  assert.equal(firstRequest.options.headers['Content-Type'], 'application/json');
  assert.ok(firstRequest.options.headers.Authorization.startsWith('TOKEN doge-ak:'));
  assert.equal(firstRequest.options.body, '{"channel":"OSS_FULL","ttl":1200,"scopes":["yoyuzh-front:assets/*"]}');
  assert.deepEqual(session, {
    accessKeyId: 'tmp-ak',
    secretAccessKey: 'tmp-sk',
    sessionToken: 'tmp-token',
    bucket: 's-cd-14873-yoyuzh-front-1258813047',
    endpoint: 'https://cos.ap-chengdu.myqcloud.com',
    bucketName: 'yoyuzh-front',
    expiresAt: 1777777777,
  });
});
|
||||
|
||||
Reference in New Issue
Block a user