update
src/api.js
@@ -1,3 +1,52 @@
+import {
+  DeleteObjectCommand,
+  GetObjectCommand,
+  ListObjectsCommand,
+  S3Client,
+} from "@aws-sdk/client-s3";
+import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
+
+const BUCKET = "repertory";
+
+const oldItems = [];
+
+const s3 = new S3Client({
+  region: "any",
+  endpoint: "https://gateway.storjshare.io",
+  forcePathStyle: true,
+  credentials: {
+    accessKeyId: process.env.R_AWS_KEY,
+    secretAccessKey: process.env.R_AWS_SECRET,
+  },
+});
+
+const cleanOldItems = async () => {
+  console.log(`cleaning|count|${oldItems.length}`);
+  while (oldItems.length > 0) {
+    try {
+      const key = oldItems.pop();
+      console.log(`cleaning|key|${key}`);
+      await s3.send(new DeleteObjectCommand({ Bucket: BUCKET, Key: key }));
+    } catch (err) {
+      console.error(err);
+    }
+  }
+};
+
+const createDownloadLink = async (key) => {
+  let filename = key.split("/");
+  filename = filename[filename.length - 1];
+  return await getSignedUrl(
+    s3,
+    new GetObjectCommand({
+      Bucket: BUCKET,
+      Key: key,
+      ResponseContentDisposition: `attachment; filename="${filename}"`,
+    }),
+    { expiresIn: 3600 },
+  );
+};
+
 const getBucketFiles = async (folderName) => {
   try {
     folderName = (folderName || "").toLowerCase();
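The new S3 client reads its credentials from the R_AWS_KEY and R_AWS_SECRET environment variables. A minimal hardening sketch, not part of this commit, that fails fast when either variable is missing:

// Sketch only (assumption, not in the commit): fail fast if the credentials
// the client above expects are not set.
for (const name of ["R_AWS_KEY", "R_AWS_SECRET"]) {
  if (!process.env[name]) {
    throw new Error(`missing required environment variable: ${name}`);
  }
}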
@@ -35,15 +84,17 @@ const getBucketFiles = async (folderName) => {
           key: obj.Key || "",
         };
       })
+      // sort just to have deterministic selection for nightly, output order will be manual
       .sort((a, b) => (a.sort > b.sort ? -1 : a.sort < b.sort ? 1 : 0));

-    const itemCount = {};
+    const byKey = Object.fromEntries(ret.map((r) => [r.key, r]));

     const ext = ".tar.gz";

-    // 1) choose which .tar.gz to keep (nightly retention unchanged)
     const tars = ret.filter((it) => it.name.endsWith(ext));
-    const keepTarKeys = new Set();

+    // Nightly retention: keep newest 3 per (product_platform_arch)
+    const keepTarKeys = new Set();
+    const itemCount = {};
     for (const t of tars) {
       if (folderName === "nightly") {
         const parts = t.name.split("_"); // 0=product,1=version,2=build,3=platform,4=arch
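The added comment names the retention rule (keep the newest three .tar.gz builds per product/platform/arch); the counting itself lives in context lines not shown in this hunk. A standalone sketch of that rule, assuming tars is already sorted newest-first and names follow product_version_build_platform_arch.tar.gz; the function name selectNightlyKeepers is hypothetical:

// Sketch only: the retention rule described by the added comment,
// not the committed implementation.
const selectNightlyKeepers = (tars, keepPerTarget = 3) => {
  const keep = new Set();
  const counts = {};
  for (const t of tars) {
    const parts = t.name.split("_"); // 0=product,1=version,2=build,3=platform,4=arch
    const target = [parts[0], parts[3], parts[4]].join("_");
    counts[target] = (counts[target] || 0) + 1;
    if (counts[target] <= keepPerTarget) keep.add(t.key);
  }
  return keep;
};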
@@ -57,21 +108,18 @@ const getBucketFiles = async (folderName) => {
         if (!oldItems.includes(t.key)) {
           oldItems.push(t.key, t.key + ".sha256", t.key + ".sig");
         }
-        // windows companion
+        // mark companions old (if they exist)
         if (parts[3] === "windows") {
           const setupKey =
             t.key.substring(0, t.key.length - ext.length) + "_setup.exe";
-          const hasSetup = ret.find((x) => x.key === setupKey);
-          if (hasSetup && !oldItems.includes(setupKey)) {
+          if (byKey[setupKey] && !oldItems.includes(setupKey)) {
            oldItems.push(setupKey, setupKey + ".sha256", setupKey + ".sig");
          }
        }
-        // darwin companion
        if (parts[3] === "darwin") {
          const dmgKey =
            t.key.substring(0, t.key.length - ext.length) + ".dmg";
-          const hasDmg = ret.find((x) => x.key === dmgKey);
-          if (hasDmg && !oldItems.includes(dmgKey)) {
+          if (byKey[dmgKey] && !oldItems.includes(dmgKey)) {
            oldItems.push(dmgKey, dmgKey + ".sha256", dmgKey + ".sig");
          }
        }
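For reference, the companion keys marked old above are derived from the .tar.gz key by replacing its extension. An illustration of that mapping; the helper name companionKeys is hypothetical, but the behavior mirrors the hunk:

// Illustration only: how windows/darwin companion keys and their sidecars
// relate to a .tar.gz key.
const companionKeys = (tarKey, platform, ext = ".tar.gz") => {
  const base = tarKey.substring(0, tarKey.length - ext.length);
  if (platform === "windows") {
    const setupKey = base + "_setup.exe";
    return [setupKey, setupKey + ".sha256", setupKey + ".sig"];
  }
  if (platform === "darwin") {
    const dmgKey = base + ".dmg";
    return [dmgKey, dmgKey + ".sha256", dmgKey + ".sig"];
  }
  return [];
};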
@@ -81,11 +129,7 @@ const getBucketFiles = async (folderName) => {
      }
    }

-    // 2) fast lookup by key
-    const byKey = Object.fromEntries(ret.map((r) => [r.key, r]));
-
-    // 3) build final list strictly in the order you want for each group:
-    // DMG(+sidecars) -> SETUP(+sidecars) -> TAR(+sidecars)
+    // Build output strictly in order: DMG group -> SETUP group -> TAR group per (0..4) tuple
     const out = [];
     for (const t of tars) {
       if (!keepTarKeys.has(t.key)) continue;
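To make the new ordering comment concrete: for one kept darwin nightly (made-up key names; no _setup.exe group applies on darwin), the keys would come out as:

// Hypothetical illustration of the per-build output order: dmg group first,
// then the tar.gz group, each followed by its .sha256 and .sig sidecars.
const exampleOrder = [
  "nightly/repertory_2.1.0_456_darwin_arm64.dmg",
  "nightly/repertory_2.1.0_456_darwin_arm64.dmg.sha256",
  "nightly/repertory_2.1.0_456_darwin_arm64.dmg.sig",
  "nightly/repertory_2.1.0_456_darwin_arm64.tar.gz",
  "nightly/repertory_2.1.0_456_darwin_arm64.tar.gz.sha256",
  "nightly/repertory_2.1.0_456_darwin_arm64.tar.gz.sig",
];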
@@ -94,32 +138,26 @@ const getBucketFiles = async (folderName) => {
       const dmgKey = base + ".dmg";
       const setupKey = base + "_setup.exe";

-      // group order: 1) DMG group (if present)
+      // 1) dmg + sidecars
       const dmg = byKey[dmgKey];
       if (dmg) {
         out.push(dmg);
-        const dmgSha = byKey[dmgKey + ".sha256"];
-        if (dmgSha) out.push(dmgSha);
-        const dmgSig = byKey[dmgKey + ".sig"];
-        if (dmgSig) out.push(dmgSig);
+        if (byKey[dmgKey + ".sha256"]) out.push(byKey[dmgKey + ".sha256"]);
+        if (byKey[dmgKey + ".sig"]) out.push(byKey[dmgKey + ".sig"]);
       }

-      // 2) SETUP group (if present)
+      // 2) setup + sidecars
       const setup = byKey[setupKey];
       if (setup) {
         out.push(setup);
-        const setupSha = byKey[setupKey + ".sha256"];
-        if (setupSha) out.push(setupSha);
-        const setupSig = byKey[setupKey + ".sig"];
-        if (setupSig) out.push(setupSig);
+        if (byKey[setupKey + ".sha256"]) out.push(byKey[setupKey + ".sha256"]);
+        if (byKey[setupKey + ".sig"]) out.push(byKey[setupKey + ".sig"]);
       }

-      // 3) TAR group (always present for this loop)
+      // 3) tar.gz + sidecars
       out.push(t);
-      const tarSha = byKey[t.key + ".sha256"];
-      if (tarSha) out.push(tarSha);
-      const tarSig = byKey[t.key + ".sig"];
-      if (tarSig) out.push(tarSig);
+      if (byKey[t.key + ".sha256"]) out.push(byKey[t.key + ".sha256"]);
+      if (byKey[t.key + ".sig"]) out.push(byKey[t.key + ".sig"]);
     }

     return out;
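The three groups above repeat the same "push the main file, then its .sha256, then its .sig" pattern. A possible refactor sketch, not what the commit does; the helper name pushWithSidecars is hypothetical:

// Sketch only: equivalent behavior to the inline pattern above.
const pushWithSidecars = (out, byKey, key) => {
  const item = byKey[key];
  if (!item) return;
  out.push(item);
  if (byKey[key + ".sha256"]) out.push(byKey[key + ".sha256"]);
  if (byKey[key + ".sig"]) out.push(byKey[key + ".sig"]);
};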
@@ -128,3 +166,4 @@ const getBucketFiles = async (folderName) => {
    return [];
  }
 };
+export { cleanOldItems, createDownloadLink, getBucketFiles };
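A usage sketch for the new exports (assumed caller, not part of this commit): list the nightly folder, presign a download link for the first entry, then purge whatever getBucketFiles marked as superseded.

// Sketch only: an assumed caller of the exported API.
import { cleanOldItems, createDownloadLink, getBucketFiles } from "./api.js";

const run = async () => {
  const files = await getBucketFiles("nightly");
  if (files.length > 0) {
    console.log(await createDownloadLink(files[0].key));
  }
  await cleanOldItems();
};

run().catch(console.error);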