update
src/api.js
@@ -7,6 +7,8 @@ import {
 import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
 
 const BUCKET = "repertory";
+const TAR = ".tar.gz";
+const REQUIRE_TRIPLETS = true; // set false to allow partial groups
 
 const oldItems = [];
 
@@ -26,7 +28,12 @@ const cleanOldItems = async () => {
   try {
     const key = oldItems.pop();
     console.log(`cleaning|key|${key}`);
-    await s3.send(new DeleteObjectCommand({ Bucket: BUCKET, Key: key }));
+    await s3.send(
+      new DeleteObjectCommand({
+        Bucket: BUCKET,
+        Key: key,
+      }),
+    );
   } catch (err) {
     console.error(err);
   }
@@ -47,6 +54,49 @@ const createDownloadLink = async (key) => {
   );
 };
 
+/* ---------------- helpers ---------------- */
+
+const sidecars = (k) => [k + ".sha256", k + ".sig"];
+
+const pushTriplet = (out, byKey, mainKey) => {
+  const main = byKey[mainKey];
+  if (!main) return false;
+  const sha = byKey[mainKey + ".sha256"];
+  const sig = byKey[mainKey + ".sig"];
+
+  if (REQUIRE_TRIPLETS && (!sha || !sig)) {
+    // strict: only emit if all 3 files exist
+    return false;
+  }
+
+  out.push(main);
+  if (sha) out.push(sha);
+  if (sig) out.push(sig);
+  return true;
+};
+
+const normalizeArch = (a) => {
+  a = (a || "").toLowerCase();
+  if (a === "arm64") return "aarch64";
+  if (a === "x86_64") return "x86-64";
+  return a;
+};
+
+const parseFromTarName = (name) => {
+  // name is a *file name* like repertory_2.1.0-rc_8465201_darwin_x86-64.tar.gz
+  const base = name.endsWith(TAR) ? name.slice(0, -TAR.length) : name;
+  const parts = base.split("_"); // 0=product,1=version,2=build,3=platform,4=arch
+  const product = parts[0] || "";
+  const version = parts[1] || "";
+  const build = parts[2] || "";
+  const platform = (parts[3] || "").toLowerCase();
+  const arch = normalizeArch(parts[4] || "");
+  const buildId = `${product}|${version}|${build}`;
+  return { base, platform, arch, buildId, product };
+};
+
+/* ---------------- main ---------------- */
+
 const getBucketFiles = async (folderName) => {
   try {
     folderName = (folderName || "").toLowerCase();
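For reference, a standalone illustration (not part of this commit) of what the new parseFromTarName helper is expected to return for the sample key named in its own comment; the field positions follow the 0=product, 1=version, 2=build, 3=platform, 4=arch convention noted above.

// Illustration only: how the sample nightly file name splits into the fields
// parseFromTarName returns (positions 0..4 per the comment in the hunk above).
const sampleName = "repertory_2.1.0-rc_8465201_darwin_x86-64.tar.gz";
const sampleBase = sampleName.slice(0, -".tar.gz".length);
console.log(sampleBase.split("_"));
// [ "repertory", "2.1.0-rc", "8465201", "darwin", "x86-64" ]
//    product     version     build      platform  arch
// -> buildId "repertory|2.1.0-rc|8465201"; arch is already normalized ("x86-64")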
@@ -59,9 +109,9 @@ const getBucketFiles = async (folderName) => {
       }),
     );
 
-    const contents = Array.isArray(data?.Contents) ? data.Contents : [];
+    const contents = Array.isArray(data && data.Contents) ? data.Contents : [];
 
-    // normalize objects
+    // normalize + newest-first
     const ret = contents
       .filter((obj) => obj.Key !== folderKey)
       .map((obj) => {
@@ -88,39 +138,17 @@ const getBucketFiles = async (folderName) => {
       .sort((a, b) => (a.sort > b.sort ? -1 : a.sort < b.sort ? 1 : 0));
 
     const byKey = Object.fromEntries(ret.map((r) => [r.key, r]));
-    const TAR = ".tar.gz";
-
-    // helpers
-    const sidecars = (k) => [k + ".sha256", k + ".sig"];
-    const tarBase = (name) =>
-      name.endsWith(TAR) ? name.slice(0, -TAR.length) : name;
-    const parsePlatArchFromTarName = (name) => {
-      // name is the *tar* file's name, strip ".tar.gz" first
-      const base = tarBase(name);
-      const parts = base.split("_"); // 0=product,1=version,2=build,3=platform,4=arch
-      const platform = (parts[3] || "").toLowerCase();
-      // arch is clean now (no extension)
-      let arch = (parts[4] || "").toLowerCase();
-      if (arch === "arm64") arch = "aarch64";
-      if (arch === "x86_64") arch = "x86-64";
-      return {
-        base,
-        platform,
-        arch,
-        buildId: `${parts[0]}|${parts[1]}|${parts[2]}`,
-      };
-    };
 
-    // anchors
+    // anchor on .tar.gz for grouping/retention
     const tars = ret.filter((it) => it.name.endsWith(TAR));
 
-    // nightly retention (per product_platform_arch)
+    // nightly retention: keep top 3 per product_platform_arch; mark others (+ companions) for deletion
     const keepTarKeys = new Set();
     const itemCount = {};
     for (const t of tars) {
-      const { platform, arch } = parsePlatArchFromTarName(t.name);
+      const { platform, arch, product } = parseFromTarName(t.name);
       if (folderName === "nightly") {
-        const groupId = `${t.name.split("_")[0]}_${platform}_${arch}`;
+        const groupId = `${product}_${platform}_${arch}`;
         itemCount[groupId] = itemCount[groupId] || 0;
         if (++itemCount[groupId] <= 3) {
           keepTarKeys.add(t.key);
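A rough, self-contained sketch (not part of this commit, with made-up keys) of the retention rule in the hunk above: with the listing already sorted newest-first, only the three newest .tar.gz files per product_platform_arch group stay out of the delete set.

// Sketch with hypothetical keys: keep the 3 newest tars per product_platform_arch.
const tarNames = [
  "repertory_2.1.4_1004_linux_x86-64.tar.gz", // newest first
  "repertory_2.1.3_1003_linux_x86-64.tar.gz",
  "repertory_2.1.2_1002_linux_x86-64.tar.gz",
  "repertory_2.1.1_1001_linux_x86-64.tar.gz", // 4th newest in its group
];
const keep = new Set();
const count = {};
for (const name of tarNames) {
  const [product, , , platform, arch] = name
    .replace(/\.tar\.gz$/, "")
    .split("_");
  const groupId = `${product}_${platform}_${arch}`;
  count[groupId] = (count[groupId] || 0) + 1;
  if (count[groupId] <= 3) keep.add(name);
}
console.log(keep.has(tarNames[3])); // false -> the oldest of the 4 gets cleaned up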
@@ -146,11 +174,11 @@ const getBucketFiles = async (folderName) => {
       }
     }
 
-    // group by build (0,1,2) -> platform/arch -> tar
-    const builds = new Map(); // buildId -> { maxSort, map: Map("platform|arch" -> tarItem) }
+    // build map: buildId -> { maxSort, map: Map("platform|arch" -> tarItem) }
+    const builds = new Map();
     for (const t of tars) {
       if (!keepTarKeys.has(t.key)) continue;
-      const { platform, arch, buildId } = parsePlatArchFromTarName(t.name);
+      const { platform, arch, buildId } = parseFromTarName(t.name);
       const keyPA = `${platform}|${arch}`;
       if (!builds.has(buildId))
         builds.set(buildId, { maxSort: t.sort, map: new Map() });
@@ -166,42 +194,30 @@ const getBucketFiles = async (folderName) => {
 
     // explicit iteration order
     const platforms = ["darwin", "windows", "linux"];
-    const archs = ["aarch64", "x86-64"]; // normalized above
+    const archs = ["aarch64", "x86-64"];
 
     // emit: for each build, for each platform/arch: .dmg (darwin), _setup.exe (windows), .tar.gz
     const out = [];
     for (const [, bucket] of orderedBuilds) {
       for (const p of platforms) {
         for (const a of archs) {
-          const t = bucket.map.get(`${p}|${a}`);
-          if (!t) continue;
+          const tar = bucket.map.get(`${p}|${a}`);
+          if (!tar) continue;
 
-          const base = t.key.slice(0, -TAR.length);
+          const base = tar.key.slice(0, -TAR.length);
 
           // 1) .dmg (darwin only)
           if (p === "darwin") {
             const dmgKey = base + ".dmg";
-            const dmg = byKey[dmgKey];
-            if (dmg) {
-              out.push(dmg);
-              for (const sk of sidecars(dmgKey))
-                if (byKey[sk]) out.push(byKey[sk]);
-            }
+            pushTriplet(out, byKey, dmgKey);
           }
 
           // 2) _setup.exe (windows only)
           if (p === "windows") {
             const setupKey = base + "_setup.exe";
-            const setup = byKey[setupKey];
-            if (setup) {
-              out.push(setup);
-              for (const sk of sidecars(setupKey))
-                if (byKey[sk]) out.push(byKey[sk]);
-            }
+            pushTriplet(out, byKey, setupKey);
           }
 
-          // 3) .tar.gz
-          out.push(t);
-          for (const sk of sidecars(t.key)) if (byKey[sk]) out.push(byKey[sk]);
+          // tar.gz (always last in the group)
+          pushTriplet(out, byKey, tar.key);
         }
       }
     }
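Finally, a standalone sketch (not part of this commit) of the strict-triplet behavior the emit loop now relies on: with REQUIRE_TRIPLETS set, pushTriplet only emits an artifact whose .sha256 and .sig companions are both present, replacing the per-sidecar pushes removed above.

// Sketch of the strict triplet rule (REQUIRE_TRIPLETS = true), using made-up keys.
const REQUIRE_TRIPLETS = true;
const pushTriplet = (out, byKey, mainKey) => {
  const main = byKey[mainKey];
  if (!main) return false;
  const sha = byKey[mainKey + ".sha256"];
  const sig = byKey[mainKey + ".sig"];
  if (REQUIRE_TRIPLETS && (!sha || !sig)) return false; // skip partial groups
  out.push(main);
  if (sha) out.push(sha);
  if (sig) out.push(sig);
  return true;
};

const byKey = {
  "a.dmg": { key: "a.dmg" },
  "a.dmg.sha256": { key: "a.dmg.sha256" },
  "a.dmg.sig": { key: "a.dmg.sig" },
  "b.dmg": { key: "b.dmg" }, // missing both companions
};
const out = [];
pushTriplet(out, byKey, "a.dmg"); // true  -> main + both companions pushed
pushTriplet(out, byKey, "b.dmg"); // false -> nothing pushed
console.log(out.map((o) => o.key)); // [ "a.dmg", "a.dmg.sha256", "a.dmg.sig" ]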