grouping
src/api.js
@@ -48,16 +48,21 @@ const byNewest = (a, b) => b.sort - a.sort;
 
 const sidecarsFor = (key) => [key + ".sha256", key + ".sig"];
 
+// parts[0]=product, parts[1]=version (e.g., 2.1.0-rc), parts[2]=build (e.g., 8465201),
+// parts[3]=platform (windows|darwin|linux), parts[4]=arch (x86-64|aarch64)
 const parseName = (fname) => {
-  // Expected convention: parts[3] = platform (windows|darwin|linux), parts[4] = arch
   const parts = fname.split("_");
+  const product = parts[0] ?? "";
+  const version = parts[1] ?? "";
+  const build = parts[2] ?? "";
   const platform = (parts[3] ?? "").toLowerCase();
   const arch = parts[4] ?? "";
-  const groupId = `${parts[0] ?? ""}_${platform}_${arch}`;
-  return { parts, platform, arch, groupId };
+  const groupId = `${product}_${platform}_${arch}`; // retention groups
+  const releaseId = `${product}_${build}`; // emit grouping
+  return { parts, product, version, build, platform, arch, groupId, releaseId };
 };
 
-// Remove sidecar suffixes first, then packaging suffix to compute a common base across variants
+// Remove sidecar suffixes first, then packaging suffix
 const stripSidecar = (name) => {
   if (name.endsWith(".sha256")) return name.slice(0, -".sha256".length);
   if (name.endsWith(".sig")) return name.slice(0, -".sig".length);
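For reference, a minimal standalone sketch of what the reworked parseName is expected to return; the key below is invented and only assumes the product_version_build_platform_arch[_package] convention described in the comments above.

// Standalone sketch (not part of the diff): mirrors the new parseName shape.
const parseNameSketch = (fname) => {
  const parts = fname.split("_");
  const product = parts[0] ?? "";
  const version = parts[1] ?? "";
  const build = parts[2] ?? "";
  const platform = (parts[3] ?? "").toLowerCase();
  const arch = parts[4] ?? "";
  const groupId = `${product}_${platform}_${arch}`; // retention bucket
  const releaseId = `${product}_${build}`; // emit bucket
  return { product, version, build, platform, arch, groupId, releaseId };
};

// Hypothetical key, made up for illustration:
console.log(parseNameSketch("myapp_2.1.0-rc_8465201_windows_x86-64_setup.exe"));
// -> groupId "myapp_windows_x86-64", releaseId "myapp_8465201"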
@@ -77,6 +82,27 @@ const isTar = (name) => name.endsWith(TAR_EXT);
 const isDmg = (name) => name.endsWith(".dmg");
 const isSetup = (name) => name.endsWith("_setup.exe");
 
+// platform/arch sort preference inside a build
+const platformRank = (platform) =>
+  platform === "darwin"
+    ? 0
+    : platform === "windows"
+      ? 1
+      : platform === "linux"
+        ? 2
+        : 3;
+
+const archRank = (arch) =>
+  arch === "aarch64"
+    ? 0
+    : arch === "arm64"
+      ? 0 // normalize if needed later
+      : arch === "x86-64"
+        ? 1
+        : arch === "x86_64"
+          ? 1
+          : 2;
+
 /* ---------------- public API ---------------- */
 
 const cleanOldItems = async () => {
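As a rough illustration of how these rank helpers are meant to order the variants of one build (darwin before windows before linux, arm before x86), here is a standalone comparator run over an invented variant list.

// Standalone sketch (not part of the diff): same ranking, condensed.
const platformRankSketch = (p) =>
  p === "darwin" ? 0 : p === "windows" ? 1 : p === "linux" ? 2 : 3;
const archRankSketch = (a) =>
  a === "aarch64" || a === "arm64" ? 0 : a === "x86-64" || a === "x86_64" ? 1 : 2;

const variants = [
  { platform: "linux", arch: "x86-64" },
  { platform: "windows", arch: "aarch64" },
  { platform: "darwin", arch: "x86-64" },
  { platform: "darwin", arch: "aarch64" },
];
variants.sort(
  (a, b) =>
    platformRankSketch(a.platform) - platformRankSketch(b.platform) ||
    archRankSketch(a.arch) - archRankSketch(b.arch),
);
console.log(variants.map((v) => `${v.platform}/${v.arch}`));
// -> ["darwin/aarch64", "darwin/x86-64", "windows/aarch64", "linux/x86-64"]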
@@ -122,56 +148,72 @@ const getBucketFiles = async (folderName) => {
     const contents = (data.Contents ?? []).filter((it) => it.Key !== folderKey);
     const listed = contents.map(toListed(folderKey)).sort(byNewest);
 
-    // Build indexes
+    // Indexes
     const byKey = Object.fromEntries(listed.map((i) => [i.key, i]));
-    const byBase = new Map(); // base -> { tar, dmg, setup, sidecars:Set<string> }
+    const byBase = new Map(); // base -> { rep, tar, dmg, setup, sidecars:Set<string>, platform, arch, releaseId }
     for (const it of listed) {
       const base = baseStem(it.name);
       let g = byBase.get(base);
       if (!g) {
-        g = { tar: null, dmg: null, setup: null, sidecars: new Set() };
+        // We’ll fill platform/arch/releaseId from the *first* non-sidecar file we see
+        g = {
+          rep: null,
+          tar: null,
+          dmg: null,
+          setup: null,
+          sidecars: new Set(),
+          platform: "",
+          arch: "",
+          releaseId: "",
+        };
         byBase.set(base, g);
       }
       if (isTar(it.name)) {
         g.tar = it;
+        g.rep = g.rep ?? it;
+        const meta = parseName(it.name);
+        g.platform = meta.platform;
+        g.arch = meta.arch;
+        g.releaseId = meta.releaseId;
       } else if (isDmg(it.name)) {
         g.dmg = it;
+        g.rep = g.rep ?? it;
+        const meta = parseName(it.name);
+        if (!g.platform) g.platform = meta.platform;
+        if (!g.arch) g.arch = meta.arch;
+        if (!g.releaseId) g.releaseId = meta.releaseId;
       } else if (isSetup(it.name)) {
         g.setup = it;
+        g.rep = g.rep ?? it;
+        const meta = parseName(it.name);
+        if (!g.platform) g.platform = meta.platform;
+        if (!g.arch) g.arch = meta.arch;
+        if (!g.releaseId) g.releaseId = meta.releaseId;
       } else if (it.name.endsWith(".sha256") || it.name.endsWith(".sig")) {
         g.sidecars.add(it.key);
       }
     }
 
-    const groups = Array.from(byBase.values());
+    const groups = Array.from(byBase.values()).filter((g) => g.rep);
 
-    // Nightly retention: keep newest 3 per (name_platform_arch).
+    // ---------- Retention (unchanged logic): newest 3 per product+platform+arch ----------
     const keepBaseSet = new Set();
     if (folder === "nightly") {
-      const buckets = new Map(); // groupId -> array of group objects
+      const buckets = new Map(); // groupId -> [groups]
       for (const g of groups) {
-        const rep = g.tar ?? g.dmg ?? g.setup;
-        if (!rep) continue;
-        const { groupId } = parseName(rep.name);
-        if (!buckets.has(groupId)) buckets.set(groupId, []);
-        buckets.get(groupId).push(g);
+        const meta = parseName(g.rep.name);
+        const gid = meta.groupId;
+        if (!buckets.has(gid)) buckets.set(gid, []);
+        buckets.get(gid).push(g);
       }
 
       for (const [, arr] of buckets) {
-        arr.sort((a, b) => {
-          const ta = (a.tar ?? a.dmg ?? a.setup)?.sort ?? 0;
-          const tb = (b.tar ?? b.dmg ?? b.setup)?.sort ?? 0;
-          return tb - ta;
-        });
-
+        arr.sort((a, b) => (b.rep?.sort ?? 0) - (a.rep?.sort ?? 0));
         arr.forEach((g, idx) => {
-          const rep = g.tar ?? g.dmg ?? g.setup;
-          if (!rep) return;
-          const base = baseStem(rep.name);
+          const base = baseStem(g.rep.name);
           if (idx < 3) {
             keepBaseSet.add(base);
           } else {
-            // mark all artifacts in the group as old
+            // mark all artifacts in this base as old
             if (g.tar)
               [g.tar.key, ...sidecarsFor(g.tar.key)].forEach((k) =>
                 oldKeys.add(k),
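A toy walk-through of the retention rule, with fabricated group objects carrying only a base, a groupId, and a sort timestamp; it assumes the same keep-newest-3-per-groupId behavior as the hunk above.

// Standalone sketch (not part of the diff): newest 3 per groupId survive.
const toyGroups = [
  { base: "app_1.0_b4_linux_x86-64", groupId: "app_linux_x86-64", sort: 4 },
  { base: "app_1.0_b3_linux_x86-64", groupId: "app_linux_x86-64", sort: 3 },
  { base: "app_1.0_b2_linux_x86-64", groupId: "app_linux_x86-64", sort: 2 },
  { base: "app_1.0_b1_linux_x86-64", groupId: "app_linux_x86-64", sort: 1 },
];

const toyBuckets = new Map();
for (const g of toyGroups) {
  if (!toyBuckets.has(g.groupId)) toyBuckets.set(g.groupId, []);
  toyBuckets.get(g.groupId).push(g);
}

const keep = new Set();
const old = new Set();
for (const [, arr] of toyBuckets) {
  arr.sort((a, b) => b.sort - a.sort);
  arr.forEach((g, idx) => (idx < 3 ? keep : old).add(g.base));
}
console.log([...keep]); // b4, b3, b2 kept
console.log([...old]); // b1 marked old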
@@ -188,27 +230,42 @@ const getBucketFiles = async (folderName) => {
         });
       }
     } else {
       // keep all groups in non-nightly
       for (const g of groups) {
-        const rep = g.tar ?? g.dmg ?? g.setup;
-        if (rep) keepBaseSet.add(baseStem(rep.name));
+        keepBaseSet.add(baseStem(g.rep.name));
       }
     }
 
-    // Emit kept groups in newest-first order, using representative timestamp
-    const kept = groups
-      .filter((g) => {
-        const rep = g.tar ?? g.dmg ?? g.setup;
-        return rep && keepBaseSet.has(baseStem(rep.name));
-      })
-      .sort((a, b) => {
-        const ta = (a.tar ?? a.dmg ?? a.setup)?.sort ?? 0;
-        const tb = (b.tar ?? b.dmg ?? b.setup)?.sort ?? 0;
-        return tb - ta;
+    // ---------- Emit order: group KEPT bases by releaseId (build), newest-first by representative timestamp ----------
+    const kept = groups.filter((g) => keepBaseSet.has(baseStem(g.rep.name)));
+    const releases = new Map(); // releaseId -> { repSort:number, items: [g...] }
+    for (const g of kept) {
+      const rid = g.releaseId || parseName(g.rep.name).releaseId;
+      if (!releases.has(rid)) {
+        releases.set(rid, { repSort: g.rep.sort, items: [] });
+      }
+      const bucket = releases.get(rid);
+      bucket.repSort = Math.max(bucket.repSort, g.rep.sort);
+      bucket.items.push(g);
+    }
+
+    // sort releases newest-first
+    const releaseList = Array.from(releases.entries()).sort(
+      (a, b) => b[1].repSort - a[1].repSort,
+    );
+
+    // Inside each release, sort platforms/arches by preference
+    const out = [];
+    for (const [, bucket] of releaseList) {
+      bucket.items.sort((a, b) => {
+        const pr = platformRank(a.platform) - platformRank(b.platform);
+        if (pr !== 0) return pr;
+        const ar = archRank(a.arch) - archRank(b.arch);
+        if (ar !== 0) return ar;
+        // fallback: newest-first
+        return (b.rep?.sort ?? 0) - (a.rep?.sort ?? 0);
       });
 
-    const out = [];
-    for (const g of kept) {
+      for (const g of bucket.items) {
         // tar + sidecars
         if (g.tar) {
           out.push(g.tar);
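Sketch of how the kept bases are regrouped for output under the new scheme: items and timestamps below are invented, releases are ordered by their newest representative timestamp, and the per-release platform/arch ordering from the ranking helpers above is only noted in the trailing comment.

// Standalone sketch (not part of the diff): emit grouping by releaseId.
const keptToy = [
  { releaseId: "app_101", platform: "linux", arch: "x86-64", sort: 10 },
  { releaseId: "app_102", platform: "windows", arch: "x86-64", sort: 21 },
  { releaseId: "app_102", platform: "darwin", arch: "aarch64", sort: 20 },
];

const toyReleases = new Map(); // releaseId -> { repSort, items }
for (const g of keptToy) {
  const r = toyReleases.get(g.releaseId) ?? { repSort: g.sort, items: [] };
  r.repSort = Math.max(r.repSort, g.sort);
  r.items.push(g);
  toyReleases.set(g.releaseId, r);
}

const ordered = [...toyReleases.entries()].sort((a, b) => b[1].repSort - a[1].repSort);
console.log(ordered.map(([rid, r]) => [rid, r.items.length]));
// -> [["app_102", 2], ["app_101", 1]]  (inside app_102, darwin would list before windows)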
@@ -234,6 +291,7 @@ const getBucketFiles = async (folderName) => {
           }
         }
+      }
     }
 
     return out;
   } catch (err) {