Files
fifthgrid_browser/src/api.js
2025-09-13 22:36:54 -05:00

233 lines
6.7 KiB
JavaScript

import {
DeleteObjectCommand,
GetObjectCommand,
ListObjectsCommand,
S3Client,
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
// Name of the S3-compatible bucket that holds release artifacts.
const BUCKET = "repertory";
// Tarball extension used as the grouping/retention anchor throughout this module.
const TAR = ".tar.gz";
const REQUIRE_TRIPLETS = true; // set false to allow partial groups
// Keys queued for deletion by getBucketFiles() (nightly retention); drained by cleanOldItems().
const oldItems = [];
// S3 client pointed at the Storj S3-compatible gateway.
// forcePathStyle is required by the gateway; region is ignored ("any").
// Credentials come from the environment — missing vars will fail at request time.
const s3 = new S3Client({
region: "any",
endpoint: "https://gateway.storjshare.io",
forcePathStyle: true,
credentials: {
accessKeyId: process.env.R_AWS_KEY,
secretAccessKey: process.env.R_AWS_SECRET,
},
});
/**
 * Drains the module-level `oldItems` queue, deleting each queued key from
 * the bucket. Deletions run sequentially; a failure for one key is logged
 * and the drain continues with the next key.
 */
const cleanOldItems = async () => {
  console.log(`cleaning|count|${oldItems.length}`);
  while (oldItems.length > 0) {
    const key = oldItems.pop();
    console.log(`cleaning|key|${key}`);
    try {
      await s3.send(new DeleteObjectCommand({ Bucket: BUCKET, Key: key }));
    } catch (err) {
      // Best-effort cleanup: log and keep draining the queue.
      console.error(err);
    }
  }
};
/**
 * Creates a presigned GET URL (valid 1 hour) for `key`, forcing a browser
 * download with the object's basename as the suggested filename.
 *
 * @param {string} key - full object key within the bucket
 * @returns {Promise<string>} presigned download URL
 */
const createDownloadLink = async (key) => {
  // Suggested download name is the last path segment of the key.
  const segments = key.split("/");
  const filename = segments[segments.length - 1];
  return await getSignedUrl(
    s3,
    new GetObjectCommand({
      Bucket: BUCKET,
      Key: key,
      // BUG FIX: previously this emitted the literal text `$(unknown)`
      // instead of interpolating the computed filename, so the computed
      // `filename` was never used and downloads got a bogus name.
      ResponseContentDisposition: `attachment; filename="${filename}"`,
    }),
    { expiresIn: 3600 },
  );
};
/* ---------------- helpers ---------------- */
// Companion keys (checksum + signature) that accompany a main artifact key.
const sidecars = (k) => [`${k}.sha256`, `${k}.sig`];
/**
 * Appends the main artifact for `mainKey` (plus its .sha256/.sig sidecars,
 * when present in `byKey`) onto `out`.
 *
 * When REQUIRE_TRIPLETS is set, nothing is emitted unless all three files
 * exist.
 *
 * @returns {boolean} true when the main artifact was emitted
 */
const pushTriplet = (out, byKey, mainKey) => {
  const main = byKey[mainKey];
  if (!main) return false;
  const sha = byKey[`${mainKey}.sha256`];
  const sig = byKey[`${mainKey}.sig`];
  // Strict mode: refuse to emit unless checksum and signature both exist.
  if (REQUIRE_TRIPLETS && !(sha && sig)) return false;
  for (const item of [main, sha, sig]) {
    if (item) out.push(item);
  }
  return true;
};
// Canonicalizes architecture names: "arm64" -> "aarch64", "x86_64" -> "x86-64";
// anything else passes through lowercased ("" for a missing value).
const normalizeArch = (a) => {
  const lowered = (a || "").toLowerCase();
  switch (lowered) {
    case "arm64":
      return "aarch64";
    case "x86_64":
      return "x86-64";
    default:
      return lowered;
  }
};
/**
 * Parses a tarball *file name* (e.g.
 * `repertory_2.1.0-rc_8465201_darwin_x86-64.tar.gz`) into its components.
 * Missing underscore-separated fields default to "".
 *
 * @param {string} name - bare file name, with or without the .tar.gz suffix
 * @returns {{base: string, platform: string, arch: string, buildId: string, product: string}}
 */
const parseFromTarName = (name) => {
  const base = name.endsWith(TAR) ? name.slice(0, -TAR.length) : name;
  // Field layout: 0=product, 1=version, 2=build, 3=platform, 4=arch
  const fields = base.split("_");
  const product = fields[0] || "";
  const version = fields[1] || "";
  const build = fields[2] || "";
  return {
    base,
    platform: (fields[3] || "").toLowerCase(),
    arch: normalizeArch(fields[4] || ""),
    buildId: `${product}|${version}|${build}`,
    product,
  };
};
/* ---------------- main ---------------- */
/**
 * Lists release artifacts under `folderName`, applies nightly retention,
 * and returns display items grouped by build (newest build first) in a
 * fixed platform/arch order.
 *
 * Side effect: for the "nightly" folder, tarballs beyond the newest 3 per
 * product/platform/arch group — plus their companion files (.sha256/.sig,
 * _setup.exe, .dmg) — are queued in the module-level `oldItems` array for
 * later deletion by cleanOldItems().
 *
 * @param {string} folderName - top-level bucket "folder" (lowercased here)
 * @returns {Promise<Array<{date: string, sort: number, name: string, key: string}>>}
 *   ordered items; [] on any error (errors are logged, not thrown)
 */
const getBucketFiles = async (folderName) => {
try {
folderName = (folderName || "").toLowerCase();
// NOTE(review): the folder name is URI-encoded before use as an S3 prefix —
// presumably keys are stored encoded; confirm against the uploader.
const folderKey = encodeURIComponent(folderName) + "/";
// NOTE(review): ListObjects (v1) returns at most 1000 keys and this call
// does not paginate — verify folders stay under that limit.
const data = await s3.send(
new ListObjectsCommand({
Bucket: BUCKET,
Prefix: folderKey,
}),
);
// Defensive: Contents may be absent when the prefix matches nothing.
const contents = Array.isArray(data && data.Contents) ? data.Contents : [];
// normalize + newest-first
const ret = contents
// Drop the folder placeholder object itself.
.filter((obj) => obj.Key !== folderKey)
.map((obj) => {
const d =
obj.LastModified instanceof Date
? obj.LastModified
: new Date(obj.LastModified);
return {
// Human-readable US-locale timestamp with commas stripped.
date: d
.toLocaleDateString("en-US", {
year: "numeric",
month: "2-digit",
day: "2-digit",
hour: "2-digit",
minute: "2-digit",
second: "2-digit",
})
.replace(/,/g, ""),
// Epoch millis for sorting; 0 when the date failed to parse.
sort: Number.isFinite(d.getTime()) ? d.getTime() : 0,
// Display name: key with the folder prefix removed (replaces the
// first occurrence, which is the leading prefix for these keys).
name: (obj.Key || "").replace(folderKey, ""),
key: obj.Key || "",
};
})
.sort((a, b) => (a.sort > b.sort ? -1 : a.sort < b.sort ? 1 : 0));
// Index by full key for companion (sidecar/installer) lookups below.
const byKey = Object.fromEntries(ret.map((r) => [r.key, r]));
// anchor on .tar.gz for grouping/retention
const tars = ret.filter((it) => it.name.endsWith(TAR));
// nightly retention: keep top 3 per product_platform_arch; mark others (+ companions) for deletion
const keepTarKeys = new Set();
const itemCount = {};
for (const t of tars) {
const { platform, arch, product } = parseFromTarName(t.name);
if (folderName === "nightly") {
const groupId = `${product}_${platform}_${arch}`;
itemCount[groupId] = itemCount[groupId] || 0;
// `tars` is newest-first, so the first 3 per group are the newest 3.
if (++itemCount[groupId] <= 3) {
keepTarKeys.add(t.key);
} else {
// Queue the stale tarball and its sidecars for deletion.
if (!oldItems.includes(t.key)) {
oldItems.push(t.key, ...sidecars(t.key));
}
// Also queue platform-specific installers tied to this tarball.
if (platform === "windows") {
const setupKey = t.key.slice(0, -TAR.length) + "_setup.exe";
if (byKey[setupKey] && !oldItems.includes(setupKey)) {
oldItems.push(setupKey, ...sidecars(setupKey));
}
}
if (platform === "darwin") {
const dmgKey = t.key.slice(0, -TAR.length) + ".dmg";
if (byKey[dmgKey] && !oldItems.includes(dmgKey)) {
oldItems.push(dmgKey, ...sidecars(dmgKey));
}
}
}
} else {
// Non-nightly folders keep everything.
keepTarKeys.add(t.key);
}
}
// build map: buildId -> { maxSort, map: Map("platform|arch" -> tarItem) }
const builds = new Map();
for (const t of tars) {
if (!keepTarKeys.has(t.key)) continue;
const { platform, arch, buildId } = parseFromTarName(t.name);
const keyPA = `${platform}|${arch}`;
if (!builds.has(buildId))
builds.set(buildId, { maxSort: t.sort, map: new Map() });
const bucket = builds.get(buildId);
// A build's sort rank is its newest artifact's timestamp.
bucket.maxSort = Math.max(bucket.maxSort, t.sort);
bucket.map.set(keyPA, t);
}
// order builds newest-first
const orderedBuilds = Array.from(builds.entries()).sort(
(a, b) => b[1].maxSort - a[1].maxSort,
);
// explicit iteration order
const platforms = ["darwin", "windows", "linux"];
const archs = ["aarch64", "x86-64"];
// emit: for each build, for each platform/arch: .dmg (darwin), _setup.exe (windows), .tar.gz
const out = [];
for (const [, bucket] of orderedBuilds) {
for (const p of platforms) {
for (const a of archs) {
const tar = bucket.map.get(`${p}|${a}`);
if (!tar) continue;
const base = tar.key.slice(0, -TAR.length);
if (p === "darwin") {
const dmgKey = base + ".dmg";
pushTriplet(out, byKey, dmgKey);
}
if (p === "windows") {
const setupKey = base + "_setup.exe";
pushTriplet(out, byKey, setupKey);
}
// tar.gz (always last in the group)
pushTriplet(out, byKey, tar.key);
}
}
}
return out;
} catch (err) {
// Any failure (network, credentials, parsing) yields an empty listing.
console.error(err);
return [];
}
};
export { cleanOldItems, createDownloadLink, getBucketFiles };