Add mongoDB support #8

Merged
Sirherobrine23 merged 6 commits from db into main 2023-01-11 04:44:07 +00:00
7 changed files with 602 additions and 795 deletions
Showing only changes of commit 629bf02091 - Show all commits

5
.vscode/launch.json vendored

@ -12,7 +12,10 @@
"skipFiles": ["<node_internals>/**", "node_modules/**"], "skipFiles": ["<node_internals>/**", "node_modules/**"],
"cwd": "${workspaceRoot}", "cwd": "${workspaceRoot}",
"runtimeExecutable": "ts-node", "runtimeExecutable": "ts-node",
"args": ["src/index.ts", "server"] "args": ["src/index.ts", "server"],
"env": {
"DISABLE_CLUSTER": "true"
}
} }
] ]
} }

63
package-lock.json generated

@ -10,7 +10,6 @@
"license": "GPL-2.0", "license": "GPL-2.0",
"dependencies": { "dependencies": {
"@sirherobrine23/coreutils": "^2.2.5", "@sirherobrine23/coreutils": "^2.2.5",
"cron": "^2.1.0",
"express": "^4.18.2", "express": "^4.18.2",
"lzma-native": "^8.0.6", "lzma-native": "^8.0.6",
"mongodb": "^4.13.0", "mongodb": "^4.13.0",
@ -23,7 +22,6 @@
"apt-stream": "src/index.js" "apt-stream": "src/index.js"
}, },
"devDependencies": { "devDependencies": {
"@types/cron": "^2.0.0",
"@types/express": "^4.17.15", "@types/express": "^4.17.15",
"@types/lzma-native": "^4.0.1", "@types/lzma-native": "^4.0.1",
"@types/node": "^18.11.18", "@types/node": "^18.11.18",
@ -1356,16 +1354,6 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"node_modules/@types/cron": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@types/cron/-/cron-2.0.0.tgz",
"integrity": "sha512-xZM08fqvwIXgghtPVkSPKNgC+JoMQ2OHazEvyTKnNf7aWu1aB6/4lBbQFrb03Td2cUGG7ITzMv3mFYnMu6xRaQ==",
"dev": true,
"dependencies": {
"@types/luxon": "*",
"@types/node": "*"
}
},
"node_modules/@types/express": { "node_modules/@types/express": {
"version": "4.17.15", "version": "4.17.15",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.15.tgz", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.15.tgz",
@ -1394,12 +1382,6 @@
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz",
"integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==" "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ=="
}, },
"node_modules/@types/luxon": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.2.0.tgz",
"integrity": "sha512-lGmaGFoaXHuOLXFvuju2bfvZRqxAqkHPx9Y9IQdQABrinJJshJwfNCKV+u7rR3kJbiqfTF/NhOkcxxAFrObyaA==",
"dev": true
},
"node_modules/@types/lzma-native": { "node_modules/@types/lzma-native": {
"version": "4.0.1", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/lzma-native/-/lzma-native-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/lzma-native/-/lzma-native-4.0.1.tgz",
@ -1851,14 +1833,6 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true "dev": true
}, },
"node_modules/cron": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/cron/-/cron-2.1.0.tgz",
"integrity": "sha512-Hq7u3P8y7UWYvsZbSKHHJDVG0VO9O7tp2qljxzTScelcTODBfCme8AIhnZsFwmQ9NchZ3hr2uNr+s3DSms7q6w==",
"dependencies": {
"luxon": "^1.23.x"
}
},
"node_modules/cssom": { "node_modules/cssom": {
"version": "0.5.0", "version": "0.5.0",
"resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz", "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz",
@ -2721,14 +2695,6 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/luxon": {
"version": "1.28.1",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-1.28.1.tgz",
"integrity": "sha512-gYHAa180mKrNIUJCbwpmD0aTu9kV0dREDrwNnuyFAsO1Wt0EVYSZelPnJlbj9HplzXX/YWXHFTL45kvZ53M0pw==",
"engines": {
"node": "*"
}
},
"node_modules/lzma-native": { "node_modules/lzma-native": {
"version": "8.0.6", "version": "8.0.6",
"resolved": "https://registry.npmjs.org/lzma-native/-/lzma-native-8.0.6.tgz", "resolved": "https://registry.npmjs.org/lzma-native/-/lzma-native-8.0.6.tgz",
@ -4940,16 +4906,6 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"@types/cron": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@types/cron/-/cron-2.0.0.tgz",
"integrity": "sha512-xZM08fqvwIXgghtPVkSPKNgC+JoMQ2OHazEvyTKnNf7aWu1aB6/4lBbQFrb03Td2cUGG7ITzMv3mFYnMu6xRaQ==",
"dev": true,
"requires": {
"@types/luxon": "*",
"@types/node": "*"
}
},
"@types/express": { "@types/express": {
"version": "4.17.15", "version": "4.17.15",
"resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.15.tgz", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.15.tgz",
@ -4978,12 +4934,6 @@
"resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz",
"integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==" "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ=="
}, },
"@types/luxon": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.2.0.tgz",
"integrity": "sha512-lGmaGFoaXHuOLXFvuju2bfvZRqxAqkHPx9Y9IQdQABrinJJshJwfNCKV+u7rR3kJbiqfTF/NhOkcxxAFrObyaA==",
"dev": true
},
"@types/lzma-native": { "@types/lzma-native": {
"version": "4.0.1", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/lzma-native/-/lzma-native-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/lzma-native/-/lzma-native-4.0.1.tgz",
@ -5336,14 +5286,6 @@
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true "dev": true
}, },
"cron": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/cron/-/cron-2.1.0.tgz",
"integrity": "sha512-Hq7u3P8y7UWYvsZbSKHHJDVG0VO9O7tp2qljxzTScelcTODBfCme8AIhnZsFwmQ9NchZ3hr2uNr+s3DSms7q6w==",
"requires": {
"luxon": "^1.23.x"
}
},
"cssom": { "cssom": {
"version": "0.5.0", "version": "0.5.0",
"resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz", "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz",
@ -5983,11 +5925,6 @@
"yallist": "^4.0.0" "yallist": "^4.0.0"
} }
}, },
"luxon": {
"version": "1.28.1",
"resolved": "https://registry.npmjs.org/luxon/-/luxon-1.28.1.tgz",
"integrity": "sha512-gYHAa180mKrNIUJCbwpmD0aTu9kV0dREDrwNnuyFAsO1Wt0EVYSZelPnJlbj9HplzXX/YWXHFTL45kvZ53M0pw=="
},
"lzma-native": { "lzma-native": {
"version": "8.0.6", "version": "8.0.6",
"resolved": "https://registry.npmjs.org/lzma-native/-/lzma-native-8.0.6.tgz", "resolved": "https://registry.npmjs.org/lzma-native/-/lzma-native-8.0.6.tgz",

@ -34,7 +34,6 @@
"build": "tsc" "build": "tsc"
}, },
"devDependencies": { "devDependencies": {
"@types/cron": "^2.0.0",
"@types/express": "^4.17.15", "@types/express": "^4.17.15",
"@types/lzma-native": "^4.0.1", "@types/lzma-native": "^4.0.1",
"@types/node": "^18.11.18", "@types/node": "^18.11.18",
@ -45,7 +44,6 @@
}, },
"dependencies": { "dependencies": {
"@sirherobrine23/coreutils": "^2.2.5", "@sirherobrine23/coreutils": "^2.2.5",
"cron": "^2.1.0",
"express": "^4.18.2", "express": "^4.18.2",
"lzma-native": "^8.0.6", "lzma-native": "^8.0.6",
"mongodb": "^4.13.0", "mongodb": "^4.13.0",

@ -1,211 +1,174 @@
import coreUtils, { DebianPackage, httpRequestGithub, httpRequest, DockerRegistry, extendFs } from "@sirherobrine23/coreutils"; import { Compressor as lzmaCompress } from "lzma-native";
import { getConfig, distManegerPackages } from "./repoConfig.js"; import { createGzip } from "node:zlib";
import { createReadStream, createWriteStream, watchFile, promises as fs } from "node:fs"; import { getConfig } from "./repoConfig.js";
import { getPackages } from "./mirror.js";
import { Readable } from "node:stream"; import { Readable } from "node:stream";
import { CronJob } from "cron"; import package_maneger from "./packagesData.js";
import { format } from "node:util"; import coreUtils from "@sirherobrine23/coreutils";
import cluster from "node:cluster";
import express from "express"; import express from "express";
import openpgp from "openpgp"; import openpgp from "openpgp";
import tar from "tar";
import path from "node:path";
export default async function main(configPath: string) { export default async function initApp(config: string) {
// Load config const packageConfig = await getConfig(config);
let repositoryConfig = await getConfig(configPath); const packageManeger = await package_maneger(packageConfig);
const packInfos = new distManegerPackages(repositoryConfig);
// Express app
const app = express(); const app = express();
app.disable("x-powered-by").disable("etag").use(express.json()).use(express.urlencoded({ extended: true })).use((req, res, next) => { app.disable("x-powered-by").disable("etag").use(express.json()).use(express.urlencoded({extended: true})).use((req, res, next) => {
res.json = (data) => res.setHeader("Content-Type", "application/json").send(JSON.stringify(data, null, 2)); res.json = data => res.setHeader("Content-Type", "application/json").send(JSON.stringify(data, null, 2));
const requestInitial = Date.now(); const cluserID = cluster.worker?.id ?? 0;
console.log("[%s]: Method: %s, From: %s, Path %s", requestInitial, req.method, req.ip, req.path); // [%s TIME STAMP, cluserID: %f]: Path: %s, Method: %s, IP: %s
res.once("close", () => console.log("[%s]: Method: %s, From: %s, Path %s, Status: %s, Time: %sms", Date.now(), req.method, req.ip, req.path, res.statusCode, Date.now() - requestInitial)); console.log("[%s, cluserID: %f]: Path: %s, Method: %s, IP: %s", new Date().toISOString(), cluserID, req.path, req.method, req.ip);
res.on("close", () => console.log("[%s, cluserID: %f]: Path: %s, Method: %s, IP: %s, Status: %f", new Date().toISOString(), cluserID, req.path, req.method, req.ip, res.statusCode));
next(); next();
}); });
app.get("/pool/:dist/:suite/:package/:arch/:version/download.deb", async ({params: {dist, suite, package: packageName, arch, version}}, {writeHead}, next) => {
// Public key try {
app.get(["/public_key", "/public.gpg"], async ({res}) => { const data = (await packageManeger.getPackages(dist, suite, packageName, arch, version))?.at(-1);
const Key = repositoryConfig["apt-config"]?.pgpKey; if (!data) return next(new Error("Not Found"));
if (!Key) return res.status(400).json({error: "This repository no sign Packages files"}); const fileStream = await data.getFileStream();
const pubKey = (await openpgp.readKey({ armoredKey: Key.public })).armor(); fileStream.pipe(writeHead(200, {
return res.setHeader("Content-Type", "application/pgp-keys").send(pubKey); "Content-Type": "application/x-debian-package",
}); "Content-Length": data.control.Size,
"Content-Disposition": `attachment; filename="${packageName}_${version}_${arch}.deb"`,
// Sources list "SHA256_hash": data.control.SHA256,
app.get(["/source_list", "/sources.list"], async (req, res) => { "MD5Sum_hash": data.control.MD5sum
const remotePath = path.posix.resolve(req.baseUrl + req.path, ".."), }));
protocol = req.headers["x-forwarded-proto"] ?? req.protocol, } catch (err) {
hostname = process.env["RAILWAY_STATIC_URL"] ?? `${req.hostname}:${req.socket.localPort}`, next(err);
host = repositoryConfig["apt-config"]?.sourcesHost ?? `${protocol}://${hostname}${remotePath}`,
concatPackage = await packInfos.getAllDistribuitions(),
type = req.query.type ?? req.query.t,
Conflicting = !!(req.query.conflicting ?? req.query.c);
if (type === "json") {
return res.json({
host,
distribuitions: concatPackage
});
} else if (type === "deb822") {}
let sourcesList = "";
concatPackage.forEach((dist) => sourcesList += format("deb %s %s %s\n", host, (Conflicting ? "./" : "")+dist.dist, dist.suites.join(" ")));
return res.status(200).setHeader("Content-Type", "text/plain").send(sourcesList);
});
// Download
app.get(["/pool", "/"], async (_req, res) => res.json(await packInfos.getAllDistribuitions()));
app.get("/pool/:dist", async (req, res) => res.json(await packInfos.getDistribuition(req.params.dist)));
app.get("/pool/:dist/:suite", async ({params: {dist, suite}}, res) => res.json(await packInfos.getPackageInfo({dist, suite})));
app.get("/pool/:dist/:suite/:arch", async ({params: {dist, suite, arch}}, res) => res.json(await packInfos.getPackageInfo({dist, suite, arch})));
app.get("/pool/:dist/:suite/:arch/:packageName", async ({params: {dist, suite, arch, packageName}}, res) => res.json(await packInfos.getPackageInfo({dist, suite, arch, packageName})));
app.get("/pool/:dist/:suite/:arch/:packageName/:version", async ({params: {dist, suite, arch, packageName, version}}, res) => res.json(await packInfos.getPackageInfo({dist, suite, arch, packageName, version})));
app.get("/pool/:dist/:suite/:arch/:packageName/:version/download.deb", async ({params: {dist, suite, arch, packageName, version}}, res, next) => packInfos.getPackageStream(dist, suite, arch, packageName, version).then(data => data.stream.pipe(res.writeHead(200, {"Content-Type": "application/x-debian-package", "Content-Length": data.control.Size, "Content-Disposition": `attachment; filename="${packageName}_${version}_${arch}.deb"`, "SHA256_hash": data.control.SHA256, "MD5Sum_hash": data.control.MD5sum}))).catch(next));
app.get("/dists/(./)?:dist/:suite/binary-:arch/Packages(.(xz|gz)|)", (req, res) => {
if (req.path.endsWith(".gz")) {
packInfos.createPackages({
compress: "gzip",
dist: req.params.dist,
arch: req.params.arch,
suite: req.params.suite,
writeStream: res.writeHead(200, {
"Content-Encoding": "gzip",
"Content-Type": "application/x-gzip"
}),
});
} else if (req.path.endsWith(".xz")) {
packInfos.createPackages({
compress: "xz",
dist: req.params.dist,
arch: req.params.arch,
suite: req.params.suite,
writeStream: res.writeHead(200, {
"Content-Encoding": "xz",
"Content-Type": "application/x-xz"
}),
});
} else {
packInfos.createPackages({
dist: req.params.dist,
arch: req.params.arch,
suite: req.params.suite,
writeStream: res.writeHead(200, {
"Content-Type": "text/plain"
}),
});
} }
}); });
app.get("/pool/:dist/:suite/:package/:arch/:version", (req, res, next) => packageManeger.getPackages(req.params.dist, req.params.suite, req.params.package, req.params.arch, req.params.version).then(data => res.json(data.at(-1).control)).catch(next));
app.get("/pool/:dist/:suite/:package/:arch", (req, res, next) => packageManeger.getPackages(req.params.dist, req.params.suite, req.params.package, req.params.arch).then(data => res.json(data.map(({control}) => control))).catch(next));
app.get("/pool/:dist/:suite/:package", (req, res, next) => packageManeger.getPackages(req.params.dist, req.params.suite, req.params.package).then(data => res.json(data.map(({control}) => control))).catch(next));
app.get("/pool/:dist/:suite", (req, res, next) => packageManeger.getPackages(req.params.dist, req.params.suite).then(data => res.json(data.map(x => x.control))).catch(next));
app.get("/pool/:dist", (req, res, next) => packageManeger.getPackages(req.params.dist).then(data => res.json(data.reduce((old, current) => {
if (!old[current.suite]) old[current.suite] = [];
old[current.suite].push(current.control);
return old;
}, {}))).catch(next));
app.get(["/", "/pool"], ({}, res, next) => packageManeger.getPackages().then(data => res.json(data.reduce((old, current) => {
if (!old[current.dist]) old[current.dist] = {};
if (!old[current.dist][current.suite]) old[current.dist][current.suite] = [];
old[current.dist][current.suite].push(current.control);
return old;
}, {}))).catch(next));
// Create Package, Package.gz and Package.xz
async function createPackages(dist: string, suite: string, arch: string) {
let rawSize = 0, gzipSize = 0, lzmaSize = 0;
const mainReadstream = new Readable({read(){}}), rawSUMs = coreUtils.extendsCrypto.createHashAsync("all", mainReadstream).then(hash => ({size: rawSize, hash}));
const gzip = mainReadstream.pipe(createGzip()), gzipSUMs = coreUtils.extendsCrypto.createHashAsync("all", gzip).then(hash => ({size: gzipSize, hash}));
const lzma = mainReadstream.pipe(lzmaCompress()), lzmaSUMs = coreUtils.extendsCrypto.createHashAsync("all", lzma).then(hash => ({size: lzmaSize, hash}));
mainReadstream.on("data", data => rawSize += data.length);
gzip.on("data", data => gzipSize += data.length);
lzma.on("data", data => lzmaSize += data.length);
const packages = await packageManeger.getPackages(dist, suite, undefined, arch);
if (!packages.length) throw new Error("Check is dist or suite have packages");
let fist = true;
for (const {control} of packages) {
if (!(control.Size && (control.MD5sum || control.SHA256 || control.SHA1))) continue;
if (fist) fist = false; else mainReadstream.push("\n\n");
control.Filename = `pool/${dist}/${suite}/${control.Package}/${control.Architecture}/${control.Version}/download.deb`;
mainReadstream.push(Object.keys(control).map(key => mainReadstream.push(`${key}: ${control[key]}`)).join("\n"));
}
mainReadstream.push(null);
return {
raw: mainReadstream,
gzip,
lzma,
SUMs: {
raw: rawSUMs,
gzip: gzipSUMs,
lzma: lzmaSUMs
}
};
}
app.get("/dists/(./)?:dist/:suite/binary-:arch/Packages(.(xz|gz)|)", async ({params: {dist, suite, arch}, path: reqPath}, res, next) => createPackages(dist, suite, arch).then(packages => {
if (reqPath.endsWith(".gz")) return packages.gzip.pipe(res);
else if (reqPath.endsWith(".xz")) return packages.lzma.pipe(res);
else return packages.raw.pipe(res);
}).catch(next));
// Release // Release
async function createReleaseV1(dist: string) { async function createRelease(dist: string) {
const { suites, archs } = await packInfos.getDistribuition(dist); const packagesArray = await packageManeger.getPackages(dist);
const distConfig = repositoryConfig.repositories[dist]; if (!packagesArray.length) throw new Error("Check is dist have packages");
if (!distConfig) throw new Error("Dist not found"); const Release: {[key: string]: string|string[]} = {};
const ReleaseLines = [];
// Origin
const Origin = distConfig["apt-config"]?.origin ?? repositoryConfig["apt-config"]?.origin;
if (Origin) ReleaseLines.push(`Origin: ${Origin}`);
// Lebel
const Label = distConfig["apt-config"]?.label ?? repositoryConfig["apt-config"]?.label;
if (Label) ReleaseLines.push(`Label: ${Label}`);
// Codename if exists
const codename = distConfig["apt-config"]?.codename ?? repositoryConfig["apt-config"]?.codename;
if (codename) ReleaseLines.push(`Codename: ${codename}`);
// Date // Date
ReleaseLines.push(`Date: ${new Date().toUTCString()}`); Release.Date = new Date().toUTCString();
// Architectures // Origin
if (archs.length === 0) throw new Error("No architectures found"); const Origin = packageConfig["apt-config"]?.origin ?? packagesArray.find(x => x.aptConfig?.origin)?.aptConfig?.origin;
ReleaseLines.push(`Architectures: ${archs.join(" ")}`); if (Origin) Release.Origin = Origin;
// Lebel
const Label = packageConfig["apt-config"]?.label ?? packagesArray.find(x => x.aptConfig?.label)?.aptConfig?.label;
if (Label) Release.Label = Label;
// Codename
const Codename = packageConfig["apt-config"]?.codename ?? packagesArray.find(x => x.aptConfig?.codename)?.aptConfig?.codename;
if (Codename) Release.Codename = Codename;
// Archs
const Archs = ([...(new Set(packagesArray.map(x => x.control.Architecture)))]);
if (!Archs.length) throw new Error("Check is dist have packages");
Release.Architectures = Archs.join(" ");
// Components // Components
if (suites.length === 0) throw new Error("No suites found"); const Components = ([...(new Set(packagesArray.map(x => x.suite)))]);
ReleaseLines.push(`Components: ${suites.join(" ")}`); if (!Components.length) throw new Error("Check is dist have packages");
Release.Components = Components.join(" ");
const createPackagesHash = distConfig["apt-config"]?.enableHash ?? repositoryConfig["apt-config"]?.enableHash ?? true; // Description
if (createPackagesHash) {
ReleaseLines.push("Acquire-By-Hash: no"); // Sum's
const hashs = (await Promise.all(archs.map(async arch => Promise.all(suites.map(async suite => { const enableHash = Boolean(packageConfig["apt-config"]?.enableHash ?? packagesArray.find(x => x.aptConfig?.enableHash)?.aptConfig?.enableHash);
const [gzip, xz, raw] = await Promise.all([packInfos.createPackages({compress: "gzip", dist, arch, suite}), packInfos.createPackages({compress: "xz", dist, arch, suite}), packInfos.createPackages({dist, arch, suite})]); if (enableHash) {
return { Release.SHA256 = [];
gz: { Release.SHA1 = [];
sha256: { Release.MD5sum = [];
file: `${suite}/binary-${arch}/Packages.gz`, const files = await Promise.all(Archs.map(async Arch => Promise.all(Components.map(async Component => {
size: gzip.size, const {SUMs} = await createPackages(dist, Component, Arch);
hash: gzip.sha256 return [
}, {
sha1: { file: `${Component}/binary-${Arch}/Packages`,
file: `${suite}/binary-${arch}/Packages.gz`, hash: await SUMs.raw
size: gzip.size,
hash: gzip.sha1
},
md5: {
file: `${suite}/binary-${arch}/Packages.gz`,
size: gzip.size,
hash: gzip.md5
}
}, },
xz: { {
sha256: { file: `${Component}/binary-${Arch}/Packages.gz`,
file: `${suite}/binary-${arch}/Packages.xz`, hash: await SUMs.gzip
size: xz.size,
hash: xz.sha256
},
sha1: {
file: `${suite}/binary-${arch}/Packages.xz`,
size: xz.size,
hash: xz.sha1
},
md5: {
file: `${suite}/binary-${arch}/Packages.xz`,
size: xz.size,
hash: xz.md5
}
}, },
raw: { {
sha256: { file: `${Component}/binary-${Arch}/Packages.xz`,
file: `${suite}/binary-${arch}/Packages`, hash: await SUMs.lzma
size: raw.size,
hash: raw.sha256
},
sha1: {
file: `${suite}/binary-${arch}/Packages`,
size: raw.size,
hash: raw.sha1
},
md5: {
file: `${suite}/binary-${arch}/Packages`,
size: raw.size,
hash: raw.md5
}
} }
}; ]
}))))).flat(2); })))).then(f => f.flat(3));
const sha256 = hashs.map(hash => hash.raw.sha256).concat(hashs.map(hash => hash.gz.sha256)).concat(hashs.map(hash => hash.xz.sha256)); files.forEach(({file, hash}) => {
if (sha256.length > 0) ReleaseLines.push(`SHA256:${sha256.sort().map((hash) => `\n ${hash.hash} ${hash.size} ${hash.file}`).join("")}`); if (hash.hash.sha256) (Release.SHA256 as string[]).push(`${hash.hash.sha256} ${hash.size} ${file}`);
if (hash.hash.sha1) (Release.SHA1 as string[]).push(`${hash.hash.sha1} ${hash.size} ${file}`);
const sha1 = hashs.map(hash => hash.raw.sha1).concat(hashs.map(hash => hash.gz.sha1)).concat(hashs.map(hash => hash.xz.sha1)); if (hash.hash.md5) (Release.MD5sum as string[]).push(`${hash.hash.md5} ${hash.size} ${file}`);
if (sha1.length > 0) ReleaseLines.push(`SHA1:${sha1.sort().map((hash) => `\n ${hash.hash} ${hash.size} ${hash.file}`).join("")}`); });
const md5 = hashs.map(hash => hash.raw.md5).concat(hashs.map(hash => hash.gz.md5)).concat(hashs.map(hash => hash.xz.md5));
if (md5.length > 0) ReleaseLines.push(`MD5Sum:${md5.sort().map((hash) => `\n ${hash.hash} ${hash.size} ${hash.file}`).join("")}`);
} }
return ReleaseLines.join("\n"); return Object.keys(Release).reduce((old, key) => {
if (Array.isArray(Release[key])) old.push(`${key}:\n ${(Release[key] as string[]).join("\n ")}`);
else old.push(`${key}: ${Release[key]}`);
return old;
}, []).join("\n");
} }
app.get("/dists/(./)?:dist/Release", (req, res, next) => createReleaseV1(req.params.dist).then((data) => res.setHeader("Content-Type", "text/plain").send(data)).catch(next)); app.get("/dists/(./)?:dist/Release", ({params: {dist}}, res, next) => createRelease(dist).then(release => res.setHeader("Content-Type", "text/plain").send(release)).catch(next));
app.get("/dists/(./)?:dist/InRelease", (req, res, next) => {
const Key = repositoryConfig["apt-config"]?.pgpKey; const pgpKey = packageConfig["apt-config"]?.pgpKey;
if (!Key) return res.status(404).json({error: "No PGP key found"}); app.get("/dists/(./)?:dist/inRelease", async (req, res, next) => {
if (!pgpKey) return res.status(404).json({error: "No PGP key found"});
return Promise.resolve().then(async () => { return Promise.resolve().then(async () => {
const privateKey = Key.passphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: Key.private }), passphrase: Key.passphrase}) : await openpgp.readPrivateKey({ armoredKey: Key.private }); const privateKey = pgpKey.passphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: pgpKey.private }), passphrase: pgpKey.passphrase}) : await openpgp.readPrivateKey({ armoredKey: pgpKey.private });
const Release = await createReleaseV1(req.params.dist); const Release = await createRelease(req.params.dist);
return res.setHeader("Content-Type", "text/plain").send(await openpgp.sign({ return res.setHeader("Content-Type", "text/plain").send(await openpgp.sign({
signingKeys: privateKey, signingKeys: privateKey,
format: "armored", format: "armored",
@ -213,238 +176,27 @@ export default async function main(configPath: string) {
})); }));
}).catch(next); }).catch(next);
}); });
app.get("/dists/(./)?:dist/Release.gpg", (req, res, next) => { app.get("/dists/(./)?:dist/Release.gpg", async (req, res, next) => {
const Key = repositoryConfig["apt-config"]?.pgpKey; if (!pgpKey) return res.status(404).json({error: "No PGP key found"});
if (!Key) return res.status(404).json({error: "No PGP key found"});
return Promise.resolve().then(async () => { return Promise.resolve().then(async () => {
const privateKey = Key.passphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: Key.private }), passphrase: Key.passphrase}) : await openpgp.readPrivateKey({ armoredKey: Key.private }); const privateKey = pgpKey.passphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: pgpKey.private }), passphrase: pgpKey.passphrase}) : await openpgp.readPrivateKey({ armoredKey: pgpKey.private });
const Release = await createReleaseV1(req.params.dist); const Release = await createRelease(req.params.dist);
return res.setHeader("Content-Type", "text/plain").send(await openpgp.sign({ return res.setHeader("Content-Type", "text/plain").send(await openpgp.sign({
signingKeys: privateKey, signingKeys: privateKey,
message: await openpgp.createMessage({text: Release}), message: await openpgp.createMessage({text: Release}),
})); }));
}).catch(next); }).catch(next);
}); });
// Public key
// 404 handler if (pgpKey) app.get(["/public_key", "/public.gpg"], async ({res}) => {
app.use((_req, res) => { if (!pgpKey) return res.status(400).json({error: "This repository no sign Packages files"});
res.status(404).json({error: "Not found"}); const pubKey = (await openpgp.readKey({ armoredKey: pgpKey.public })).armor();
return res.setHeader("Content-Type", "application/pgp-keys").send(pubKey);
}); });
// Error handler return {
app.use((err, _req, res, _next) => { app,
console.error(err); packageManeger,
res.status(500).json({ packageConfig
error: err?.message||err, };
stack: err?.stack?.split("\n"),
});
});
// Listen HTTP server
const port = process.env.PORT ?? repositoryConfig["apt-config"].portListen ?? 0;
app.listen(port, function () {return console.log(`apt-repo listening at http://localhost:${this.address().port}`);});
// Loading and update packages
let cronJobs: CronJob[] = [];
const waitPromises: Promise<void>[] = [];
const saveFile = repositoryConfig["apt-config"]?.saveFiles;
const rootPool = repositoryConfig["apt-config"]?.poolPath;
for (const dist in repositoryConfig.repositories) {
const targets = repositoryConfig.repositories[dist].targets;
for (const repository of targets) {
const update = async () => {
if (repository.from === "mirror") {
return Promise.all(Object.keys(repository.dists).map(async distName => {
const distInfo = repository.dists[distName];
const packagesData = distInfo.suites ? await Promise.all(distInfo.suites.map(async suite => getPackages(repository.uri, {dist: distName, suite}))).then(U => U.flat()) : await getPackages(repository.uri, {dist: distName});
return packagesData.forEach(({Package: control}) => {
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(control.Filename);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(control.Filename);
}
return packInfos.addPackage(dist, repository.suite ?? "main", {repositoryConfig: repository, control, getStream});
});
}));
} else if (repository.from === "oci") {
const registry = await DockerRegistry.Manifest.Manifest(repository.image, repository.platfom_target);
return registry.layersStream((data) => {
if (!(["gzip", "gz", "tar"]).some(ends => data.layer.mediaType.endsWith(ends))) return data.next();
data.stream.pipe(tar.list({
async onentry(entry) {
if (!entry.path.endsWith(".deb")) return null;
const control = await DebianPackage.extractControl(entry as any);
const suite = repository.suite ?? "main";
packInfos.addPackage(dist, suite, {
repositoryConfig: repository,
control,
async getStream() {
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
return new Promise<Readable>((done, reject) => registry.blobLayerStream(data.layer.digest).then(stream => {
stream.on("error", reject);
stream.pipe(tar.list({
async onentry(getEntry) {
if (getEntry.path !== entry.path) return null;
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
entry.pipe(createWriteStream(filePool));
}
return done(getEntry as any);
}
// @ts-ignore
}).on("error", reject));
}).catch(reject));
}
});
}
}));
});
} else if (repository.from === "github_release") {
if (repository.tags) {
const release = await Promise.all(repository.tags.map(async releaseTag => httpRequestGithub.getRelease({
owner: repository.owner,
repository: repository.repository,
token: repository.token,
releaseTag,
})));
return Promise.all(release.map(async release => Promise.all(release.assets.map(async ({browser_download_url, name}) => {
if (!name.endsWith(".deb")) return null;
const control = await DebianPackage.extractControl(await httpRequest.pipeFetch(browser_download_url));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(browser_download_url);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(browser_download_url);
}
return packInfos.addPackage(dist, repository.suite ?? release.tag_name, {repositoryConfig: repository, control, getStream});
})))).then(data => data.flat(2).filter(Boolean));
}
const release = await httpRequestGithub.getRelease({owner: repository.owner, repository: repository.repository, token: repository.token, peer: repository.assetsLimit, all: false});
return Promise.all(release.map(async release => Promise.all(release.assets.map(async ({browser_download_url, name}) => {
if (!name.endsWith(".deb")) return null;
const control = await DebianPackage.extractControl(await httpRequest.pipeFetch(browser_download_url));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(browser_download_url);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(browser_download_url);
}
return packInfos.addPackage(dist, repository.suite ?? release.tag_name, {repositoryConfig: repository, control, getStream});
})))).then(data => data.flat(2).filter(Boolean));
} else if (repository.from === "github_tree") {
const { tree } = await httpRequestGithub.githubTree(repository.owner, repository.repository, repository.tree);
const filtedTree = tree.filter(({path: remotePath}) => {
if (repository.path) return repository.path.some(repoPath => {
if (!remotePath.startsWith("/")) remotePath = "/" + remotePath;
if (typeof repoPath === "string") {
if (!repoPath.startsWith("/")) repoPath = "/" + repoPath;
return remotePath.startsWith(repoPath);
}
return false;
});
return true;
}).filter(({path, type}) => path.endsWith(".deb") && type === "blob");
return Promise.all(filtedTree.map(async ({path: filePath}) => {
const downloadUrl = `https://raw.githubusercontent.com/${repository.owner}/${repository.repository}/${repository.tree}/${filePath}`;
const control = await DebianPackage.extractControl(await httpRequest.pipeFetch(downloadUrl));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(downloadUrl);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(downloadUrl);
}
return packInfos.addPackage(dist, repository.suite ?? "main", {repositoryConfig: repository, control, getStream});
}));
} else if (repository.from === "google_drive") {
const client_id = repository.appSettings.client_id;
const client_secret = repository.appSettings.client_secret;
const token = repository.appSettings.token;
const googleDriver = await coreUtils.googleDriver.GoogleDriver(client_id, client_secret, {
token,
async authCallback(url, token) {
if (url) console.log("Please visit this url to auth google driver: %s", url);
else console.log("Google driver auth success, please save token to config file, token: %s", token);
},
});
const files = (repository.folderId ? (await Promise.all(repository.folderId.map(async folderId => await googleDriver.listFiles(folderId)))).flat() : await googleDriver.listFiles());
return Promise.all(files.filter(({name, isTrashedFile}) => !isTrashedFile && name.endsWith(".deb")).map(async fileData => {
const control = await DebianPackage.extractControl(await googleDriver.getFileStream(fileData.id));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await googleDriver.getFileStream(fileData.id);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return googleDriver.getFileStream(fileData.id);
}
return packInfos.addPackage(dist, repository.suite ?? "main", {repositoryConfig: repository, control, getStream});
}));
} else if (repository.from === "oracle_bucket") {
const oracleBucket = await coreUtils.oracleBucket(repository.region as any, repository.bucketName, repository.bucketNamespace, repository.auth);
return Promise.all((await oracleBucket.fileList()).filter(({name}) => name.endsWith(".deb")).map(async fileData => {
const control = await DebianPackage.extractControl(await oracleBucket.getFileStream(fileData.name));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await oracleBucket.getFileStream(fileData.name);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return oracleBucket.getFileStream(fileData.name);
}
return packInfos.addPackage(dist, repository.suite ?? "main", {repositoryConfig: repository, control, getStream});
}));
}
return null;
}
waitPromises.push(update().then(() => {
const cron = (repository.cronRefresh ?? []).map((cron) => new CronJob(cron, update));
cron.forEach((cron) => cron.start());
cronJobs.push(...cron);
}).catch(console.error));
}
}
// watch config file changes
watchFile(configPath, async () => {
console.info("Config file changed, reloading config and update packages...");
repositoryConfig = await getConfig(configPath);
cronJobs.forEach((cron) => cron.stop());
cronJobs = [];
});
// await Promise.all(waitPromises);
return app;
} }

@ -6,6 +6,8 @@ import yargs from "yargs";
import path from "node:path"; import path from "node:path";
import repo from "./express_route.js"; import repo from "./express_route.js";
import yaml from "yaml"; import yaml from "yaml";
import os from "node:os";
import cluster from "node:cluster";
yargs(process.argv.slice(2)).version(false).help().demandCommand().strictCommands().alias("h", "help").option("cofig-path", { yargs(process.argv.slice(2)).version(false).help().demandCommand().strictCommands().alias("h", "help").option("cofig-path", {
type: "string", type: "string",
@ -84,12 +86,52 @@ yargs(process.argv.slice(2)).version(false).help().demandCommand().strictCommand
}).parseSync(); }).parseSync();
const config = await getConfig(options.cofigPath); const config = await getConfig(options.cofigPath);
const base64 = Buffer.from(options.json ? JSON.stringify(config) : yaml.stringify(config)).toString("base64"); const base64 = Buffer.from(options.json ? JSON.stringify(config) : yaml.stringify(config)).toString("base64");
if (options.output) return writeFile(options.output, base64).then(() => console.log("Saved to '%s'", options.output)); if (options.output) return writeFile(options.output, "base64:"+base64).then(() => console.log("Saved to '%s'", options.output));
console.log("base64:%s", base64); console.log("base64:%s", base64);
}); });
}).command("server", "Run HTTP serber", yargs => { }).command("server", "Run HTTP serber", async yargs => {
const options = yargs.parseSync(); const options = yargs.parseSync();
const envs = Object.keys(process.env).filter(key => key.startsWith("APT_STREAM")); const envs = Object.keys(process.env).filter(key => key.startsWith("APT_STREAM"));
if (envs.length > 0) return Promise.all(envs.map(async env => repo(`env:${env}`))); const { app, packageConfig, packageManeger } = await repo(envs.length > 0 ? `env:${envs[0]}` : options.cofigPath);
return repo(options.cofigPath); app.all("*", ({res}) => res.status(404).json({
error: "Endpoint not exists",
message: "Endpoint not exists, check the documentation for more information"
}));
app.use((err, {}, res, {}) => {
console.error(err);
const stack: string = err?.stack ?? "No stack";
res.status(500).json({
error: "Internal Server Error",
message: "There was an error on our part, sorry for the inconvenience",
stack: {
forUser: "Create issue in apt-stream repository (https://github.com/Sirherobrine23/apt-stream/issues) with value of 'forDeveloper'",
forDeveloper: stack
},
});
});
const port = process.env.PORT ?? packageConfig["apt-config"]?.portListen ?? 3000;
if (!(Boolean(process.env["DISABLE_CLUSTER"]))) {
if (cluster.isWorker) {
app.listen(port, function() {console.log("Apt Stream Port listen on %f", this.address()?.port)});
return console.log("Worker %d running, PID: %f", cluster.worker?.id ?? "No ID", process.pid);
}
console.log("Master %f is running", process.pid);
os.cpus().forEach(() => cluster.fork());
cluster.on("exit", (worker, code, signal: NodeJS.Signals) => {
if (signal === "SIGKILL") return console.log("Worker %d was killed", worker?.id ?? "No ID");
else if (signal === "SIGTERM") return console.log("Worker %d was terminated", worker?.id ?? "No ID");
else if (code )
console.log("Worker %d died with code: %s, Signal: %s", worker?.id ?? "No ID", code, signal ?? "No Signal");
});
} else app.listen(port, function() {console.log("Apt Stream Port listen on %f", this.address()?.port)});
// large ram available
if (os.freemem() > 2 * 1024 * 1024 * 1024) return Promise.all(Object.keys(packageConfig.repositories).map(async distName => {const dist = packageConfig.repositories[distName]; return Promise.all(dist.targets.map(async target => packageManeger.loadRepository(distName, target, packageConfig["apt-config"], packageConfig).catch(console.error)));})).catch(console.error);
console.warn("Not enough RAM to load all repositories, loading one by one");
for (const distName in packageConfig.repositories) {
const dist = packageConfig.repositories[distName];
for (const target of dist.targets) {
await packageManeger.loadRepository(distName, target, packageConfig["apt-config"], packageConfig).catch(console.error);
}
}
}).parseAsync(); }).parseAsync();

382
src/packagesData.ts Normal file

@ -0,0 +1,382 @@
import coreUtils, { DebianPackage, DockerRegistry, extendFs, httpRequest, httpRequestGithub } from "@sirherobrine23/coreutils";
import { createReadStream, createWriteStream, promises as fs } from "node:fs";
import { MongoClient, ServerApiVersion, Filter } from "mongodb";
import { apt_config, backendConfig, repository } from "./repoConfig.js";
import { Readable } from "node:stream";
import { getPackages } from "./mirror.js";
import path from "node:path";
import tar from "tar";
// A single Debian package entry as stored by the package maneger (MongoDB or in-memory).
export type packageSave = {
  // Distribution name the package is registered under.
  dist: string,
  // Suite/component inside the distribution (e.g. "main").
  suite: string,
  // Source repository configuration this package was loaded from.
  repository: repository,
  // Optional apt-config overrides attached to this package.
  aptConfig?: apt_config,
  // Parsed Debian control data (Package, Version, Architecture, ...).
  control: DebianPackage.debianControl,
  // Serializable metadata used to rebuild getFileStream after a database round-trip;
  // the extra keys depend on `from` (e.g. fileUrl, fileId, digest, path, fileName).
  restoreFileStream?: {
    from: repository["from"],
    [key: string]: any,
  },
  // Lazily opens a readable stream of the .deb file contents.
  getFileStream: () => Promise<Readable>,
};
// Public API of the package maneger returned by packageManeger().
export type packageManegerV2 = {
  // Scan a configured repository source and register every .deb found in it.
  loadRepository: (distName: string, repo: repository, packageAptConfig?: apt_config, aptConfig?: backendConfig) => Promise<any>,
  // List packages, optionally filtered by dist/suite/name/arch/version; throws when nothing matches.
  getPackages: (dist?: string, suite?: string, Package?: string, Arch?: string, Version?: string) => Promise<packageSave[]>,
  // Remove a single package entry; throws when it does not exist.
  deletePackage: (repo: Partial<packageSave>) => Promise<packageSave>,
  // Register a package entry, replacing any previous entry with the same identity.
  addPackage: (repo: packageSave) => Promise<void>,
};
/**
* Maneger and Load packages to Database or internal object (Nodejs Heap Memory, if large data use Database)
* @returns
*/
export default async function packageManeger(config: backendConfig): Promise<packageManegerV2> {
const partialConfig: Partial<packageManegerV2> = {};
partialConfig.loadRepository = async function loadRepository(distName: string, repository: repository, packageAptConfig?: apt_config, aptConfig?: backendConfig) {
const saveFile = aptConfig["apt-config"]?.saveFiles ?? false;
const rootPool = aptConfig["apt-config"]?.poolPath ?? path.join(process.cwd(), "pool");
if (repository.from === "mirror") {
return Promise.all(Object.keys(repository.dists).map(async distName => {
const distInfo = repository.dists[distName];
const packagesData = distInfo.suites ? await Promise.all(distInfo.suites.map(async suite => getPackages(repository.uri, {dist: distName, suite}))).then(U => U.flat()) : await getPackages(repository.uri, {dist: distName});
return packagesData.forEach(({Package: control}) => {
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(control.Filename);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(control.Filename);
}
return partialConfig.addPackage({
dist: distName,
suite: repository.suite,
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "mirror",
fileUrl: control.Filename,
}
});
});
}));
} else if (repository.from === "oci") {
const registry = await DockerRegistry.Manifest.Manifest(repository.image, repository.platfom_target);
return registry.layersStream((data) => {
if (!(["gzip", "gz", "tar"]).some(ends => data.layer.mediaType.endsWith(ends))) return data.next();
data.stream.pipe(tar.list({
async onentry(entry) {
if (!entry.path.endsWith(".deb")) return null;
const control = await DebianPackage.extractControl(entry as any);
const suite = repository.suite ?? "main";
async function getStream() {
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
return new Promise<Readable>((done, reject) => registry.blobLayerStream(data.layer.digest).then(stream => {
stream.on("error", reject);
stream.pipe(tar.list({
async onentry(getEntry) {
if (getEntry.path !== entry.path) return null;
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
entry.pipe(createWriteStream(filePool));
}
return done(getEntry as any);
}
// @ts-ignore
}).on("error", reject));
}).catch(reject));
}
return partialConfig.addPackage({
dist: distName,
suite,
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "oci",
digest: data.layer.digest,
path: entry.path,
}
});
}
}));
});
} else if (repository.from === "github_release") {
if (repository.tags) {
const release = await Promise.all(repository.tags.map(async releaseTag => httpRequestGithub.getRelease({
owner: repository.owner,
repository: repository.repository,
token: repository.token,
releaseTag,
})));
return Promise.all(release.map(async release => Promise.all(release.assets.map(async ({browser_download_url, name}) => {
if (!name.endsWith(".deb")) return null;
const control = await DebianPackage.extractControl(await httpRequest.pipeFetch(browser_download_url));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(browser_download_url);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(browser_download_url);
}
return partialConfig.addPackage({
dist: distName,
suite: repository.suite ?? "main",
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "github_release",
fileUrl: browser_download_url,
}
});
})))).then(data => data.flat(2).filter(Boolean));
}
const release = await httpRequestGithub.getRelease({owner: repository.owner, repository: repository.repository, token: repository.token, peer: repository.assetsLimit, all: false});
return Promise.all(release.map(async release => Promise.all(release.assets.map(async ({browser_download_url, name}) => {
if (!name.endsWith(".deb")) return null;
const control = await DebianPackage.extractControl(await httpRequest.pipeFetch(browser_download_url));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(browser_download_url);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(browser_download_url);
}
return partialConfig.addPackage({
dist: distName,
suite: repository.suite ?? "main",
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "github_release",
fileUrl: browser_download_url,
}
});
})))).then(data => data.flat(2).filter(Boolean));
} else if (repository.from === "github_tree") {
const { tree } = await httpRequestGithub.githubTree(repository.owner, repository.repository, repository.tree);
const filtedTree = tree.filter(({path: remotePath}) => {
if (repository.path) return repository.path.some(repoPath => {
if (!remotePath.startsWith("/")) remotePath = "/" + remotePath;
if (typeof repoPath === "string") {
if (!repoPath.startsWith("/")) repoPath = "/" + repoPath;
return remotePath.startsWith(repoPath);
}
return false;
});
return true;
}).filter(({path, type}) => path.endsWith(".deb") && type === "blob");
return Promise.all(filtedTree.map(async ({path: filePath}) => {
const downloadUrl = `https://raw.githubusercontent.com/${repository.owner}/${repository.repository}/${repository.tree}/${filePath}`;
const control = await DebianPackage.extractControl(await httpRequest.pipeFetch(downloadUrl));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await httpRequest.pipeFetch(downloadUrl);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return httpRequest.pipeFetch(downloadUrl);
}
return partialConfig.addPackage({
dist: distName,
suite: repository.suite ?? "main",
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "github_tree",
fileUrl: downloadUrl,
}
});
}));
} else if (repository.from === "google_drive") {
const client_id = repository.appSettings.client_id;
const client_secret = repository.appSettings.client_secret;
const token = repository.appSettings.token;
const googleDriver = await coreUtils.googleDriver.GoogleDriver(client_id, client_secret, {
token,
async authCallback(url, token) {
if (url) console.log("Please visit this url to auth google driver: %s", url);
else console.log("Google driver auth success, please save token to config file, token: %s", token);
},
});
const files = (repository.folderId ? (await Promise.all(repository.folderId.map(async folderId => await googleDriver.listFiles(folderId)))).flat() : await googleDriver.listFiles());
return Promise.all(files.filter(({name, isTrashedFile}) => !isTrashedFile && name.endsWith(".deb")).map(async fileData => {
const control = await DebianPackage.extractControl(await googleDriver.getFileStream(fileData.id));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await googleDriver.getFileStream(fileData.id);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return googleDriver.getFileStream(fileData.id);
}
return partialConfig.addPackage({
dist: distName,
suite: repository.suite ?? "main",
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "google_drive",
fileId: fileData.id,
}
});
}));
} else if (repository.from === "oracle_bucket") {
const oracleBucket = await coreUtils.oracleBucket(repository.region as any, repository.bucketName, repository.bucketNamespace, repository.auth);
return Promise.all((await oracleBucket.fileList()).filter(({name}) => name.endsWith(".deb")).map(async fileData => {
const control = await DebianPackage.extractControl(await oracleBucket.getFileStream(fileData.name));
const filePool = path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
const getStream = async () => {
if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
if (saveFile) {
const mainPath = path.resolve(filePool, "..");
if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
const fileStream = await oracleBucket.getFileStream(fileData.name);
fileStream.pipe(createWriteStream(filePool));
return fileStream;
}
return oracleBucket.getFileStream(fileData.name);
}
return partialConfig.addPackage({
dist: distName,
suite: repository.suite ?? "main",
repository: repository,
control,
aptConfig: packageAptConfig ?? aptConfig["apt-config"],
getFileStream: getStream,
restoreFileStream: {
from: "oracle_bucket",
fileName: fileData.name,
}
});
}));
}
throw new Error(`Unknown repository from: ${(repository as any)?.from ?? "undefined"}`);
}
if (config["apt-config"]?.mongodb) {
// Connect to database
const mongoConfig = config["apt-config"].mongodb;
const mongoClient = await (new MongoClient(mongoConfig.uri, {serverApi: ServerApiVersion.v1})).connect();
const collection = mongoClient.db(mongoConfig.db ?? "aptStream").collection<packageSave>(mongoConfig.collection ?? "packagesData");
// Add package to database
partialConfig.addPackage = async function addPackage(repo) {
const existsPackage = await collection.findOne({dist: repo.dist, suite: repo.suite, "control.Package": repo.control.Package, "control.Version": repo.control.Version, "control.Architecture": repo.control.Architecture});
if (existsPackage) await partialConfig.deletePackage(repo);
await collection.insertOne(repo);
}
// Delete package
partialConfig.deletePackage = async function deletePackage(repo) {
const packageDelete = await collection.findOneAndDelete({dist: repo.dist, suite: repo.suite, "control.Package": repo.control.Package, "control.Version": repo.control.Version, "control.Architecture": repo.control.Architecture});
if (!packageDelete.value) throw new Error("Package not found!");
return packageDelete.value;
}
// Packages
function fixPackage(data: packageSave): packageSave {
if (!data.restoreFileStream) throw new Error("cannot restore file stream!");
data.getFileStream = async function getFileStream() {
if (data.restoreFileStream.from === "github_release"|| data.restoreFileStream.from === "github_tree") return coreUtils.httpRequest.pipeFetch(data.restoreFileStream.url);
else if (data.restoreFileStream.from === "google_drive" && data.repository.from === "google_drive") {
const googleDriver = await coreUtils.googleDriver.GoogleDriver(data.repository.appSettings.client_id, data.repository.appSettings.client_secret, {token: data.repository.appSettings.token});
return googleDriver.getFileStream(data.restoreFileStream.fileId);
} else if (data.restoreFileStream.from === "oracle_bucket" && data.repository.from === "oracle_bucket") {
const oracleBucket = await coreUtils.oracleBucket(data.repository.region as any, data.repository.bucketName, data.repository.bucketNamespace, data.repository.auth);
return oracleBucket.getFileStream(data.restoreFileStream.fileName);
} else if (data.restoreFileStream.from === "mirror" && data.repository.from === "mirror") return coreUtils.httpRequest.pipeFetch(data.restoreFileStream.url);
else if (data.restoreFileStream.from === "oci" && data.repository.from === "oci") {
const oci = await coreUtils.DockerRegistry(data.repository.image);
return new Promise((done, reject) => {
oci.blobLayerStream(data.restoreFileStream.digest).then((stream) => {
stream.pipe(tar.list({
filter: (path) => path === data.restoreFileStream.fileName,
onentry: (entry) => done(entry as any)
}))
}).catch(reject);
});
}
throw new Error("Cannot restore file stream!");
}
return data;
}
partialConfig.getPackages = async function getPackages(dist, suite, Package, Arch, Version) {
const doc: Filter<packageSave> = {};
if (dist) doc.dist = dist;
if (suite) doc.suite = suite;
if (Package) doc["control.Package"] = Package;
if (Arch) doc["control.Architecture"] = Arch;
if (Version) doc["control.Version"] = Version;
const packageInfo = await collection.find(doc).toArray();
if (!packageInfo) throw new Error("Package not found!");
return packageInfo.map(fixPackage);
}
} else {
// Internal Object
let packagesArray: packageSave[] = [];
// Add package to array
partialConfig.addPackage = async function addPackage(repo) {
const existsPackage = packagesArray.find((x) => x.control.Package === repo.control.Package && x.control.Version === repo.control.Version && x.control.Architecture === repo.control.Architecture && x.dist === repo.dist && x.suite === repo.suite && x.repository === repo.repository);
if (existsPackage) await partialConfig.deletePackage(repo);
packagesArray.push(repo);
}
// Delete package
partialConfig.deletePackage = async function deletePackage(repo) {
const index = packagesArray.findIndex((x) => x.control.Package === repo.control.Package && x.control.Version === repo.control.Version && x.control.Architecture === repo.control.Architecture && x.dist === repo.dist && x.suite === repo.suite && x.repository === repo.repository);
if (index === -1) throw new Error("Package not found!");
const packageDelete = packagesArray.splice(index, 1);
return packageDelete.at(-1);
}
// Packages
partialConfig.getPackages = async function getPackages(dist, suite, Package, Arch, Version) {
const packageInfo = packagesArray.filter(x => (!dist || x.dist === dist) && (!suite || x.suite === suite) && (!Package || x.control.Package === Package) && (!Arch || x.control.Architecture === Arch) && (!Version || x.control.Version === Version));
if (!packageInfo.length) throw new Error("Package not found!");
return packageInfo;
}
}
// Return functions
return partialConfig as packageManegerV2;
}

@ -1,15 +1,8 @@
import coreUtils, { DebianPackage, DockerRegistry, extendFs, extendsCrypto, httpRequest } from "@sirherobrine23/coreutils"; import coreUtils, { DockerRegistry, extendFs, httpRequest } from "@sirherobrine23/coreutils";
import { createReadStream, createWriteStream } from "node:fs";
import { MongoClient, ServerApiVersion } from "mongodb";
import { Compressor as lzmaCompressor } from "lzma-native";
import { Readable, Writable } from "node:stream";
import { debianControl } from "@sirherobrine23/coreutils/src/deb.js";
import { createGzip } from "node:zlib";
import { format } from "node:util"; import { format } from "node:util";
import yaml from "yaml"; import yaml from "yaml";
import path from "node:path"; import path from "node:path";
import fs from "node:fs/promises"; import fs from "node:fs/promises";
import tar from "tar";
export type apt_config = { export type apt_config = {
origin?: string, origin?: string,
@ -108,7 +101,6 @@ export async function saveConfig(filePath: string, config: backendConfig) {
} }
export async function getConfig(config: string) { export async function getConfig(config: string) {
const fixedConfig: backendConfig = {};
let configData: backendConfig, avaiableToDirname = true; let configData: backendConfig, avaiableToDirname = true;
if (config.startsWith("http")) { if (config.startsWith("http")) {
avaiableToDirname = false; avaiableToDirname = false;
@ -136,15 +128,20 @@ export async function getConfig(config: string) {
} }
} }
} else { } else {
if (!await coreUtils.extendFs.exists(config)) throw new Error("config File not exists"); if (!await coreUtils.extendFs.exists(config)) throw new Error("config File not exists, return "+JSON.stringify(config));
configData = yaml.parse(await fs.readFile(config, "utf8")); configData = yaml.parse(await fs.readFile(config, "utf8"));
} }
fixedConfig["apt-config"] = {}; if (typeof configData !== "object") throw new Error("Invalid config file");
const fixedConfig: backendConfig = {
"apt-config": {},
repositories: {}
};
if (configData["apt-config"]) { if (configData["apt-config"]) {
const rootData = configData["apt-config"]; const rootData = configData["apt-config"];
fixedConfig["apt-config"].portListen = rootData.portListen ?? 3000; fixedConfig["apt-config"].portListen = rootData.portListen ?? 3000;
fixedConfig["apt-config"].poolPath = rootData.poolPath ?? path.join(process.cwd(), "apt-stream");
fixedConfig["apt-config"].saveFiles = rootData.saveFiles ?? false; fixedConfig["apt-config"].saveFiles = rootData.saveFiles ?? false;
if (rootData.poolPath) fixedConfig["apt-config"].poolPath = rootData.poolPath;
if (fixedConfig["apt-config"].poolPath && !await extendFs.exists(fixedConfig["apt-config"].poolPath)) await fs.mkdir(fixedConfig["apt-config"].poolPath, {recursive: true}); if (fixedConfig["apt-config"].poolPath && !await extendFs.exists(fixedConfig["apt-config"].poolPath)) await fs.mkdir(fixedConfig["apt-config"].poolPath, {recursive: true});
if (rootData.codename) fixedConfig["apt-config"].codename = rootData.codename; if (rootData.codename) fixedConfig["apt-config"].codename = rootData.codename;
if (rootData.origin) fixedConfig["apt-config"].origin = rootData.origin; if (rootData.origin) fixedConfig["apt-config"].origin = rootData.origin;
@ -289,307 +286,3 @@ export async function getConfig(config: string) {
}); });
return fixedConfig; return fixedConfig;
} }
export type packageData = {
control: debianControl,
getStream: () => Readable|Promise<Readable>,
repositoryConfig?: repository
}
type distObject = {
[distribuition: string]: {
[suite: string]: {
[arch: string]: packageData[]
}
}
}
type dbDist = {
control: debianControl,
repositoryConfig?: repository,
dist: string,
suite: string,
getfile: repository & {file: string, blobLayer?: string}
};
/**
 * Manages the package catalogue for all distributions/suites/architectures.
 *
 * Storage backend is either a plain in-memory object (default) or MongoDB when
 * `apt-config.mongodb` is present in the backend config.
 */
export class distManegerPackages {
  /** In-memory package index used when MongoDB is not configured. */
  public distribuitions: distObject = {};
  /** Connected client when `apt-config.mongodb` is set; otherwise undefined. */
  public mongoClinte?: MongoClient;
  // NOTE(review): appears unused within this file — confirm before removing.
  public internalDist: distObject = {};
  public config: backendConfig;
  constructor(config: backendConfig) {
    this.config = config;
    if (config["apt-config"]?.mongodb) {
      this.mongoClinte = new MongoClient(config["apt-config"]?.mongodb?.uri, {
        serverApi: ServerApiVersion.v1,
      });
      // connect() is async; without a catch an early connection failure would
      // surface as an unhandled promise rejection and crash newer Node versions.
      this.mongoClinte.connect().catch(err => console.error("MongoDB connection failed:", err));
    }
  }
  /**
   * Build the distribution -> suite -> arch -> packages tree.
   *
   * Without MongoDB this returns the live in-memory object (mutations through
   * the result are visible to the manager). With MongoDB every matching
   * document is converted to a packageData whose getStream lazily fetches the
   * .deb from its origin (mirror, OCI registry, GitHub, Google Drive, Oracle
   * bucket), optionally caching it in the local pool.
   *
   * @param dist - optional distribution filter
   * @param suite - optional suite filter
   */
  async getPackages(dist?: string, suite?: string): Promise<distObject> {
    let repo: distObject = {};
    if (!this.mongoClinte) repo = this.distribuitions;
    else {
      if (dist && typeof dist !== "string") throw new Error("dist must be a string");
      if (suite && typeof suite !== "string") throw new Error("suite must be a string");
      const saveFile = this.config["apt-config"]?.saveFiles;
      const rootPool = this.config["apt-config"]?.poolPath;
      const collection = this.mongoClinte.db(this.config["apt-config"]?.mongodb?.db ?? "packages").collection<dbDist>(this.config["apt-config"]?.mongodb.collection ?? "packages");
      // Only put defined keys in the filter: `{dist: undefined}` matches
      // documents where the field is null/missing instead of matching all.
      const query: {dist?: string, suite?: string} = {};
      if (dist) query.dist = dist;
      if (suite) query.suite = suite;
      for (const dataDB of await collection.find(query).toArray()) {
        const repository = dataDB.getfile, control = dataDB.control;
        if (dist && dist !== dataDB.dist) continue;
        if (suite && suite !== dataDB.suite) continue;
        async function getStream() {
          // Path of the locally cached .deb for this exact package build.
          const localPool = () => path.join(rootPool, control.Package.slice(0, 1), `${control.Package}_${control.Architecture}_${control.Version}.deb`);
          if (repository.from === "mirror") {
            const filePool = localPool();
            if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
            return coreUtils.httpRequest.pipeFetch(dataDB.getfile.file);
          } else if (repository.from === "oci") {
            const filePool = localPool();
            if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
            const registry = await coreUtils.DockerRegistry(repository.image, repository.platfom_target);
            // Stream the blob layer and resolve with the tar entry that matches
            // the wanted file; optionally tee it into the local pool.
            return new Promise<Readable>((done, reject) => registry.blobLayerStream(dataDB.getfile.blobLayer).then(stream => {
              stream.on("error", reject);
              stream.pipe(tar.list({
                async onentry(getEntry) {
                  if (getEntry.path !== dataDB.getfile.file) return null;
                  if (saveFile) {
                    const mainPath = path.resolve(filePool, "..");
                    if (!await extendFs.exists(mainPath)) await fs.mkdir(mainPath, {recursive: true});
                    getEntry.pipe(createWriteStream(filePool));
                  }
                  return done(getEntry as any);
                }
                // @ts-ignore
              }).on("error", reject));
            }).catch(reject));
          } else if (repository.from === "github_release" || repository.from === "github_tree") {
            const filePool = localPool();
            if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
            return coreUtils.httpRequest.pipeFetch({
              url: dataDB.getfile.file,
              headers: (repository.token ? {"Authorization": `token ${repository.token}`} : {})
            })
          } else if (repository.from === "google_drive") {
            const filePool = localPool();
            if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
            const client_id = repository.appSettings.client_id;
            const client_secret = repository.appSettings.client_secret;
            const token = repository.appSettings.token;
            const googleDriver = await coreUtils.googleDriver.GoogleDriver(client_id, client_secret, {
              token,
              async authCallback(url, token) {
                if (url) console.log("Please visit this url to auth google driver: %s", url);
                else console.log("Google driver auth success, please save token to config file, token: %s", token);
              },
            });
            return googleDriver.getFileStream(dataDB.getfile.file);
          } else if (repository.from === "oracle_bucket") {
            const filePool = localPool();
            if (saveFile && await extendFs.exists(filePool)) return createReadStream(filePool);
            const oracleBucket = await coreUtils.oracleBucket(repository.region as any, repository.bucketName, repository.bucketNamespace, repository.auth);
            return oracleBucket.getFileStream(dataDB.getfile.file);
          }
          // NOTE(review): an unknown "from" yields null even though the
          // declared element type promises a Readable — callers must tolerate
          // this; confirm whether throwing would be more appropriate.
          return null;
        }
        if (!repo[dataDB.dist]) repo[dataDB.dist] = {};
        if (!repo[dataDB.dist][dataDB.suite]) repo[dataDB.dist][dataDB.suite] = {};
        if (!repo[dataDB.dist][dataDB.suite][control.Architecture]) repo[dataDB.dist][dataDB.suite][control.Architecture] = [];
        repo[dataDB.dist][dataDB.suite][control.Architecture].push({
          getStream,
          repositoryConfig: dataDB.repositoryConfig,
          control,
        });
        console.log("add package %s %s %s %s", dataDB.dist, dataDB.suite, control.Architecture, control.Package);
      }
    }
    return repo;
  }
  /** Ensure the distribution bucket exists in the in-memory index. */
  public async addDistribuition(distribuition: string) {
    if (!this.distribuitions[distribuition]) this.distribuitions[distribuition] = {};
    return this.distribuitions[distribuition];
  }
  /** Ensure the suite bucket exists under a distribution. */
  public async addSuite(distribuition: string, suite: string) {
    if (!this.distribuitions[distribuition][suite]) this.distribuitions[distribuition][suite] = {};
    return this.distribuitions[distribuition][suite];
  }
  /** Ensure the architecture bucket exists under a suite. */
  public async addArch(distribuition: string, suite: string, arch: string) {
    if (!this.distribuitions[distribuition][suite][arch]) this.distribuitions[distribuition][suite][arch] = [];
    return this.distribuitions[distribuition][suite][arch];
  }
  /**
   * Register a package in a distribution/suite. An existing entry with the
   * same Package and Version is replaced instead of duplicated.
   *
   * @param distribuition - target distribution
   * @param suite - target suite
   * @param packageData - control data plus stream getter
   * @returns the stored packageData
   */
  public async addPackage(distribuition: string, suite: string, packageData: packageData) {
    const dist = await this.getPackages(distribuition, suite);
    // These helpers are async; previously they were fired without await, so the
    // buckets were only guaranteed to exist by accident of their sync bodies.
    await this.addDistribuition(distribuition);
    await this.addSuite(distribuition, suite);
    await this.addArch(distribuition, suite, packageData.control.Architecture);
    const currentPackages = dist[distribuition]?.[suite]?.[packageData.control.Architecture] ?? [];
    if (!this.mongoClinte) {
      if (currentPackages.some(pkg => pkg.control.Package === packageData.control.Package)) {
        if (currentPackages.some(pkg => pkg.control.Version === packageData.control.Version && pkg.control.Package === packageData.control.Package)) {
          // Same package+version already registered: replace it in place.
          const index = currentPackages.findIndex(pkg => pkg.control.Version === packageData.control.Version && pkg.control.Package === packageData.control.Package);
          return dist[distribuition][suite][packageData.control.Architecture][index] = packageData;
        }
      }
      dist[distribuition][suite][packageData.control.Architecture].push(packageData);
    } else {
      const collection = this.mongoClinte.db(this.config["apt-config"]?.mongodb?.db ?? "packages").collection(this.config["apt-config"]?.mongodb.collection ?? "packages");
      // Dot-notation so individual control fields are matched. The previous
      // query used a misspelled key ("cotrol") and whole-subdocument equality,
      // so it never matched and duplicate documents accumulated.
      const existing = await collection.findOne({
        dist: distribuition,
        suite,
        "control.Package": packageData.control.Package,
        "control.Version": packageData.control.Version,
        "control.Architecture": packageData.control.Architecture,
      });
      if (existing) await collection.deleteOne({_id: existing._id});
      // Key must be "repositoryConfig" to match the dbDist shape getPackages reads.
      await collection.insertOne({
        dist: distribuition,
        suite,
        control: packageData.control,
        repositoryConfig: packageData.repositoryConfig
      });
    }
    return packageData;
  }
  /**
   * Remove one package version from the in-memory index and return it.
   * NOTE(review): only touches the in-memory index; MongoDB-backed deployments
   * are not handled here — confirm whether that is intended.
   * @throws Error when the distribution, suite, arch or package is unknown
   */
  public deletePackage(distribuition: string, suite: string, arch: string, packageName: string, version: string) {
    if (!this.distribuitions[distribuition]) throw new Error("Distribuition not exists");
    if (!this.distribuitions[distribuition][suite]) throw new Error("Suite not exists");
    if (!this.distribuitions[distribuition][suite][arch]) throw new Error("Arch not exists");
    const index = this.distribuitions[distribuition][suite][arch].findIndex(pkg => pkg.control.Package === packageName && pkg.control.Version === version);
    if (index === -1) throw new Error("Package not exists");
    const data = this.distribuitions[distribuition][suite][arch][index];
    this.distribuitions[distribuition][suite][arch].splice(index, 1);
    return data;
  }
  /**
   * Summarize one distribution: its suites, architectures and control stanzas.
   * @returns null when the distribution is unknown
   */
  public async getDistribuition(distName: string) {
    const dist = (await this.getPackages(distName))[distName];
    if (!dist) return null;
    const suites = Object.keys(dist);
    const suiteData = suites.map(suite => {
      const Packages = Object.keys(dist[suite]).map(arch => dist[suite][arch].map(packageInfo => packageInfo.control)).flat();
      return {
        Suite: suite,
        Archs: Object.keys(dist[suite]),
        Packages
      };
    });
    return {
      dist: distName,
      suites,
      // Deduplicated union of the architectures seen in every suite
      archs: [...(new Set(suiteData.map(suite => suite.Archs).flat()))],
      suiteData,
    };
  }
  /** Summaries for every known distribution (unknown ones filtered out). */
  public async getAllDistribuitions() {
    const dist = await this.getPackages();
    return (await Promise.all(Object.keys(dist).map(dist => this.getDistribuition(dist)))).flat().filter(Boolean);
  }
  /**
   * Query control stanzas, progressively narrowing by dist/suite/arch/package.
   * Returns a nested object, an array, or a single control stanza depending on
   * how many filters were supplied (matching the original contract).
   */
  public async getPackageInfo(info: {dist: string, suite?: string, arch?: string, packageName?: string, version?: string}) {
    const packageDateObject: {[k: string]: {[l: string]: {[a: string]: DebianPackage.debianControl[]}}} = {};
    const distData = await this.getPackages(info.dist, info.suite);
    for (const dist in distData) {
      if (info.dist && info.dist !== dist) continue;
      packageDateObject[dist] = {};
      for (const suite in distData[dist]) {
        if (info.suite && info.suite !== suite) continue;
        packageDateObject[dist][suite] = {};
        for (const arch in distData[dist][suite]) {
          if (info.arch && info.arch !== arch) continue;
          packageDateObject[dist][suite][arch] = distData[dist][suite][arch].map(pkg => pkg.control).filter(pkg => (!info.packageName || pkg.Package === info.packageName) && (!info.version || pkg.Version === info.version));
        }
      }
    }
    if (info.dist) {
      const dist = packageDateObject[info.dist];
      if (info.suite) {
        const suite = dist[info.suite];
        if (info.arch) {
          const arch = suite[info.arch];
          if (info.packageName) return arch.find(pkg => pkg.Package === info.packageName && (!info.version || pkg.Version === info.version));
          return arch;
        }
      }
      return dist;
    }
    return packageDateObject;
  }
  /**
   * Open the .deb stream for one exact package version.
   * @throws Error when any level of the lookup is unknown
   */
  public async getPackageStream(distribuition: string, suite: string, arch: string, packageName: string, version: string) {
    const dist = (await this.getPackages(distribuition))[distribuition];
    if (!dist) throw new Error("Distribuition not exists");
    if (!dist[suite]) throw new Error("Suite not exists");
    if (!dist[suite][arch]) throw new Error("Arch not exists");
    const packageData = dist[suite][arch].find(pkg => pkg.control.Package === packageName && pkg.control.Version === version);
    if (!packageData) throw new Error("Package not exists");
    return Promise.resolve(packageData.getStream()).then(stream => ({control: packageData.control, repository: packageData.repositoryConfig, stream}));
  }
  /**
   * Write a Debian "Packages" index (optionally gzip or xz compressed) to
   * options.writeStream and return the hashes and byte size of what was
   * written — the data needed for the Release file.
   *
   * Only packages with a known Size and at least one checksum are emitted.
   */
  public async createPackages(options?: {compress?: "gzip" | "xz", writeStream?: Writable, singlePackages?: boolean, dist?: string, package?: string, arch?: string, suite?: string}) {
    const distribuition = await this.getPackages(options?.dist);
    const rawWrite = new Readable({read(){}});
    let size = 0, addbreak = false, hash: ReturnType<typeof extendsCrypto.createHashAsync>|undefined;
    // Hashes and size are computed over the (possibly compressed) final bytes.
    if (options?.compress === "gzip") {
      const gzip = rawWrite.pipe(createGzip({level: 9}));
      if (options?.writeStream) gzip.pipe(options.writeStream);
      hash = extendsCrypto.createHashAsync("all", gzip);
      gzip.on("data", (chunk) => size += chunk.length);
    } else if (options?.compress === "xz") {
      const lzma = rawWrite.pipe(lzmaCompressor());
      if (options?.writeStream) lzma.pipe(options.writeStream);
      hash = extendsCrypto.createHashAsync("all", lzma);
      lzma.on("data", (chunk) => size += chunk.length);
    } else {
      if (options?.writeStream) rawWrite.pipe(options.writeStream);
      hash = extendsCrypto.createHashAsync("all", rawWrite);
      rawWrite.on("data", (chunk) => size += chunk.length);
    }
    for (const dist in distribuition) {
      if (options?.dist && options.dist !== dist) continue;
      const suites = distribuition[dist];
      for (const suite in suites) {
        if (options?.suite && options.suite !== suite) continue;
        const archs = suites[suite];
        for (const arch in archs) {
          // "all" packages are emitted for every requested architecture
          if (arch !== "all" && (options?.arch && options.arch !== arch)) continue;
          const packages = archs[arch];
          for (const {control} of packages) {
            if (!control.Size) continue;
            if (!(control.SHA1 || control.SHA256 || control.MD5sum)) continue;
            if (options?.package && options.package !== control.Package) continue;
            // Stanzas are separated by one blank line
            if (addbreak) rawWrite.push("\n\n"); else addbreak = true;
            control["Filename"] = poolLocationPackage(dist, suite, arch, control.Package, control.Version);
            const Data = Object.keys(control).map(key => `${key}: ${control[key]}`);
            rawWrite.push(Data.join("\n"));
            if (options?.singlePackages) break;
          }
        }
      }
    }
    rawWrite.push(null);
    if (hash) return hash.then(hash => ({...hash, size}));
    return null;
  }
}