WIP: Migrate dpkg package #36

Draft
Sirherobrine23 wants to merge 2 commits from safeOverflow into main
10 changed files with 641 additions and 2356 deletions
Showing only changes of commit ec4fcd52e4 - Show all commits

@ -46,25 +46,25 @@
"devDependencies": {
"@types/express": "^4.17.17",
"@types/inquirer": "^9.0.3",
"@types/node": "^18.16.3",
"@types/node": "^20.3.2",
"@types/yargs": "^17.0.24",
"ts-node": "^10.9.1",
"typescript": "^5.0.4"
"typescript": "^5.1.6"
},
"dependencies": {
"@sirherobrine23/cloud": "^3.6.8",
"@sirherobrine23/decompress": "^3.6.8",
"@sirherobrine23/docker-registry": "^3.6.8",
"@sirherobrine23/dpkg": "^3.6.8",
"@sirherobrine23/extends": "^3.6.8",
"@sirherobrine23/http": "^3.6.8",
"@sirherobrine23/cloud": "^3.6.12",
"@sirherobrine23/decompress": "^3.6.12",
"@sirherobrine23/docker-registry": "^3.6.12",
"@sirherobrine23/dpkg": "^3.6.12",
"@sirherobrine23/extends": "^3.6.12",
"@sirherobrine23/http": "^3.6.12",
"express": "^4.18.2",
"express-rate-limit": "^6.7.0",
"inquirer": "^9.2.1",
"inquirer": "^9.2.7",
"inquirer-file-tree-selection-prompt": "^2.0.5",
"mongodb": "^5.4.0",
"openpgp": "^5.8.0",
"yaml": "^2.2.2",
"mongodb": "^5.6.0",
"openpgp": "^5.9.0",
"yaml": "^2.3.1",
"yargs": "^17.7.2"
}
}

File diff suppressed because it is too large Load Diff

@ -1,766 +0,0 @@
import { Repository, aptStreamConfig, repositorySource } from "./config.js";
import connectDb, { packageManeger } from "./packages.js";
import { googleDriver } from "@sirherobrine23/cloud";
import { readFile, readdir } from "fs/promises";
import { apt } from "@sirherobrine23/dpkg";
import inquirerFileTreeSelection from "inquirer-file-tree-selection-prompt";
import dockerRegistry from "@sirherobrine23/docker-registry";
import coreHTTP from "@sirherobrine23/http";
import inquirer from "inquirer";
import path from "node:path";
import { MongoClient } from "mongodb";
import { createServer } from "http";
import { githubToken } from "@sirherobrine23/http/src/github.js";
import dns from "node:dns/promises";
import os from "os";
// Register the custom "file-tree-selection" prompt type used below to pick local files.
inquirer.registerPrompt("file-tree-selection", inquirerFileTreeSelection);
/**
 * Interactive entry point for managing an aptStream config file.
 *
 * Ensures a database URL is configured (prompting for one if not), connects,
 * then loops over a top-level action menu until the user exits. The config is
 * saved (best-effort) after every action and once more after closing.
 *
 * @param configOrigin Path to the config file to load/manage.
 */
export default async function main(configOrigin: string) {
  const config = new aptStreamConfig(configOrigin);
  // Prompt for a MongoDB URL first if none is configured yet.
  if (!config.databaseAvaible()) await setDatabse(config);
  const configManeger = await connectDb(config);
  // Shared picker used by both "edit" and "delete": returns the chosen
  // repository name, or "exit" when the user cancels.
  const selectRepository = async (): Promise<string> => (await inquirer.prompt({
    name: "repo",
    message: "Select repository:",
    type: "list",
    choices: [
      {
        name: "Cancel",
        value: "exit"
      },
      ...(configManeger.getRepositorys().map(d => d.repositoryName))
    ],
  })).repo;
  while (true) {
    const action = (await inquirer.prompt<{initAction: "serverEdit"|"newRepo"|"del"|"editRepo"|"syncRepo"|"exit"}>({
      name: "initAction",
      type: "list",
      message: "Select action:",
      choices: [
        { name: "Edit repository", value: "editRepo" },
        { name: "Create new repository", value: "newRepo" },
        { name: "Delete repository", value: "del" },
        { name: "Edit server configs", value: "serverEdit" },
        { name: "Exit", value: "exit" }
      ]
    })).initAction;
    if (action === "exit") break;
    else if (action === "newRepo") {
      const repoName = (await inquirer.prompt({
        name: "repoName",
        message: "Repository name:",
        type: "input",
        // Reject names already present in the config.
        validate: (name) => configManeger.hasSource(name.trim()) ? "Choose another repository name, this one already exists" : true,
      })).repoName.trim();
      if (repoName) await editRepository(configManeger.createRepository(repoName), configManeger);
      else console.log("The repository was not created, cancelling!");
    } else if (action === "editRepo") {
      const repoSelected = await selectRepository();
      if (repoSelected !== "exit") await editRepository(configManeger.getRepository(repoSelected), configManeger);
    } else if (action === "del") {
      const repoSelected = await selectRepository();
      if (repoSelected !== "exit") {
        if (configManeger.deleteRepository(repoSelected)) console.log("Repository deleted");
        else console.error("Fail to delete repository!");
      }
    } else if (action === "serverEdit") await serverConfig(configManeger);
    // Persist after every action; ignore write failures (best-effort).
    await configManeger.saveConfig().catch(() => {});
  }
  return configManeger.close().then(async () => configManeger.saveConfig());
}
/**
 * Interactive menu for server-level settings: listen port, cluster fork
 * count, MongoDB URL and the gzip/xz Release compression toggles.
 * Loops until the user picks "Return"; the config is saved (best-effort)
 * at the top of every iteration.
 */
async function serverConfig(config: packageManeger) {
  for (;;) {
    await config.saveConfig().catch(() => {});
    const { action } = await inquirer.prompt({
      name: "action",
      type: "list",
      choices: [
        { name: "Serve port", value: "serverPort" },
        { name: "Serve threads forks", value: "serverThreads" },
        { name: "Change mongodb URL", value: "updateDB" },
        { name: "Switch gzip release compressions", value: "relGzip" },
        { name: "Switch xz release compressions", value: "relXz" },
        { name: "Return", value: "exit" },
      ]
    });
    if (action === "exit") break;
    switch (action) {
      case "relGzip":
        // Toggle and echo the new state.
        console.log("Set gzip to %O", config.setCompressRelease("gzip", !config.getCompressRelease("gzip")).getCompressRelease("gzip"));
        break;
      case "relXz":
        console.log("Set xz to %O", config.setCompressRelease("xz", !config.getCompressRelease("xz")).getCompressRelease("xz"));
        break;
      case "serverPort":
        // The validate callback both checks the range and stores the value.
        await inquirer.prompt({
          name: "port",
          type: "number",
          default: config.getPortListen(),
          message: "Server port:",
          validate(input: number) {
            if (input < 0 || input > 65535) return "Port must be between 0 and 65535";
            config.setPortListen(input);
            return true;
          }
        });
        break;
      case "serverThreads":
        await inquirer.prompt({
          name: "threads",
          type: "number",
          default: config.getClusterForks(),
          message: "Server threads forks:",
          validate(input: number) {
            if (input < 0) return "Threads must be greater or equal 0";
            // More forks than cores is allowed, but warn about it.
            if (input > os.availableParallelism()) console.warn("\nThe number of threads was greater than the system can handle, be careful!");
            config.setClusterForks(input);
            return true;
          }
        });
        break;
      case "updateDB":
        await setDatabse(config);
        break;
    }
  }
}
/**
 * Prompt for a MongoDB connection URL and store it in the config.
 * The URL is validated by opening (and immediately closing) a real client
 * connection before it is accepted.
 */
async function setDatabse(repo: aptStreamConfig) {
  const promps = await inquirer.prompt({
    name: "url",
    type: "input",
    message: "Mongodb URL:",
    async validate(input) {
      try {
        await (await (new MongoClient(input)).connect()).close(true);
        return true;
      } catch (err) {
        // Always hand inquirer a string (matches the error style of the other
        // validators in this file); the raw error object is not a valid result.
        return err?.message || String(err);
      }
    },
  });
  repo.setDatabse(promps.url);
}
/**
 * Interactive editor for a single repository: manage its sources and its
 * Release metadata (codename, description, label, origin, suite).
 * Returns the same Repository instance after the user picks "Return".
 */
async function editRepository(repo: Repository, configManeger: packageManeger) {
  let exitShowSync = false;
  await configManeger.saveConfig().catch(() => {});
  // Helper for the repeated single free-text question; resolves to the typed value.
  const askInput = async (message: string) => (await inquirer.prompt({
    name: "value",
    type: "input",
    message
  })).value;
  for (;;) {
    const action = (await inquirer.prompt({
      name: "action",
      message: "Repository actions:",
      type: "list",
      choices: [
        { name: "New repository sources", value: "add" },
        { name: "Delete sources", value: "del" },
        { name: "Delete all sources", value: "delAll" },
        { name: "Set repository codename", value: "setCodename" },
        { name: "Set repository description", value: "setDescription" },
        { name: "Set repository label", value: "setLabel" },
        { name: "Set repository origin", value: "setOrigin" },
        { name: "Set repository suite", value: "setSuite" },
        { name: "Return", value: "exit" },
      ]
    })).action;
    if (action === "exit") break;
    switch (action) {
      case "setCodename":
        repo.setCodename(await askInput("What is new Codename?"));
        break;
      case "setDescription":
        repo.setDescription(await askInput("Set description in one single line:"));
        break;
      case "setLabel":
        repo.setLabel(await askInput("Set label:"));
        break;
      case "setSuite":
        repo.setSuite(await askInput("What is Suite name?"));
        break;
      case "setOrigin": {
        // Ask for confirmation before overwriting an existing origin; the
        // value prompt only runs when there is none or the user confirmed.
        const input = (await inquirer.prompt([
          {
            when: () => !!repo.getOrigin(),
            name: "confirm",
            message: "Are you sure you want to change Origin?",
            type: "confirm",
            default: false
          },
          {
            when: (ask) => !repo.getOrigin() || ask["confirm"],
            name: "value",
            type: "input",
            message: "What is Origin name?"
          }
        ])).value;
        if (!input) console.log("Canceled set origin"); else repo.setOrigin(input);
        break;
      }
      case "delAll":
        exitShowSync = true;
        repo.clear();
        break;
      case "del": {
        const sources = repo.getAllRepositorys();
        if (!sources.length) {
          console.info("Not sources!");
          continue;
        }
        const picked: string[] = (await inquirer.prompt({
          name: "sel",
          type: "checkbox",
          message: "Select IDs:",
          choices: repo.getAllRepositorys().map(d => ({name: `${d.repositoryID} (${d.type})`, value: d.repositoryID})),
        })).sel;
        exitShowSync = true;
        picked.forEach(id => repo.delete(id));
        break;
      }
      case "add": {
        // One retry on failure before bubbling the error up.
        const buildSource = async () => createSource().catch(err => {
          console.error(err);
          console.log("Try again");
          return createSource();
        });
        repo.set(configManeger.createRepositoryID(), await buildSource());
        break;
      }
    }
  }
  if (exitShowSync) console.info("Sync packages!");
  return repo;
}
/**
 * Interactively builds one repository source definition.
 *
 * First asks for the source type and an optional APT component name
 * (defaulting to "main"), then walks a type-specific prompt flow:
 * plain HTTP URL, APT mirror (sources.list), Github release/branch,
 * Google Drive (with loopback oAuth), Oracle Cloud bucket, or a Docker/OCI
 * image. If the selected type matches no branch it logs and restarts itself.
 */
async function createSource(): Promise<repositorySource> {
  let { srcType, componentName } = (await inquirer.prompt<{srcType: repositorySource["type"], componentName?: string}>([
    {
      name: "srcType",
      type: "list",
      choices: [
        {
          value: "http",
          name: "HTTP Directly"
        },
        {
          value: "mirror",
          name: "APT Mirror"
        },
        {
          value: "github",
          name: "Github Release/Branch"
        },
        {
          value: "googleDriver",
          name: "Google Drive"
        },
        {
          value: "oracleBucket",
          name: "Oracle Cloud Infracture Bucket"
        },
        {
          value: "docker",
          name: "OCI (Open Container Iniciative)/Docker Image"
        },
      ]
    },
    {
      type: "confirm",
      name: "addComp",
      message: "Add component name?",
      default: false
    },
    {
      // Only asked when the user opted in above; spaces are rejected.
      name: "componentName",
      when: (answers) => answers["addComp"],
      type: "input",
      default: "main",
      validate: (inputComp) => (/[\s]+/).test(inputComp) ? "Remove Spaces" : true
    }
  ]));
  componentName ||= "main";
  if (srcType === "http") {
    // Plain HTTP source: accept any URL whose hostname resolves in DNS.
    return {
      type: "http", componentName,
      url: (await inquirer.prompt({
        name: "reqUrl",
        type: "input",
        async validate(urlInput) {
          try {
            const { hostname } = new URL(urlInput);
            await dns.resolve(hostname);
            return true
          } catch (err) { return err?.message || String(err); }}
      })).reqUrl,
    };
  } else if (srcType === "mirror") {
    // APT mirror: take a sources.list either from an existing file on disk
    // or from an editor buffer, then keep only the "packages" (deb) entries.
    const promps = (await inquirer.prompt([
      {
        type: "list",
        name: "sourceFrom",
        choices: [
          {name: "Select file", value: "fileSelect"},
          {name: "Create from scrat", value: "createIt"}
        ]
      },
      {
        when: (answers) => answers["sourceFrom"] === "fileSelect",
        name: "fileSource",
        type: "file-tree-selection",
        message: "Select file source path:"
      },
      {
        when: (answers) => answers["sourceFrom"] !== "fileSelect",
        name: "fileSource",
        type: "editor",
        message: "creating sources",
        default: "# This is comment\ndeb http://example.com example main",
      }
    ]));
    return {
      type: "mirror", componentName,
      config: (apt.parseSourceList(promps["sourceFrom"] !== "fileSelect" ? promps["fileSource"] : await readFile(promps["fileSource"], "utf8"))).filter(src => src.type === "packages")
    };
  } else if (srcType === "github") {
    const promps = await inquirer.prompt([
      {
        name: "token",
        type: "input",
        message: "Github token to private repositorys (it is not necessary if it is public):",
        default: githubToken,
        validate(input: string) {
          // Only the modern "ghp_" personal-token format is accepted here.
          if (input.length > 0) if (!(input.startsWith("ghp_"))) return "Invalid token, if old token set manualy in Config file!";
          return true;
        },
      },
      {
        name: "owner",
        type: "input",
        message: "Repository owner:",
        async validate(input, ask) {
          // The owner must exist on the Github API (request is authenticated
          // when a token was supplied above).
          try {
            const apiReq = new URL(path.posix.join("/users", path.posix.resolve("/", input)), "https://api.github.com");
            await coreHTTP.jsonRequestBody(apiReq, {headers: ask.token ? {Authorization: `token ${ask.token}`}:{}});
            return true;
          } catch (err) {
            return err?.body?.message || err?.message || String(err);
          }
        }
      },
      {
        name: "repository",
        type: "list",
        message: "Select repository:",
        async choices(answers) {
          // List the owner's repositories to choose from.
          const apiReq = new URL(path.posix.join("/users", answers["owner"], "repos"), "https://api.github.com");
          return (await coreHTTP.jsonRequestBody<{name: string}[]>(apiReq, {headers: answers.token ? {Authorization: `token ${answers.token}`}:{}})).map(({name}) => name);
        },
      },
      {
        name: "subType",
        type: "list",
        message: "Where to get the .deb files?",
        choices: [
          "Release",
          "Branch"
        ]
      }
    ]);
    const { owner, repository, token } = promps;
    if (promps["subType"] === "Branch") {
      // Branch source: pick one branch; uploads are never supported here.
      return {
        type: "github", subType: "branch", componentName, enableUpload: false,
        owner, repository, token,
        branch: (await inquirer.prompt({
          name: "branch",
          type: "list",
          message: "Select the branch:",
          async choices() {
            const apiReq = new URL(path.posix.join("/repos", owner, repository, "branches"), "https://api.github.com");
            return (await coreHTTP.jsonRequestBody<{name: string}[]>(apiReq)).map(({name}) => name);
          }
        })).branch,
      };
    }
    // Release source: pick one or more tags; upload opt-in requires a token.
    const { tag, enableUpload } = await inquirer.prompt([
      {
        when: () => !!token,
        type: "confirm",
        name: "enableUpload",
        message: "Enable support to upload files to Github Release?",
        default: false,
      },
      {
        name: "tag",
        type: "checkbox",
        message: "Select tags:",
        async choices() {
          const apiReq = new URL(path.posix.join("/repos", owner, repository, "releases"), "https://api.github.com");
          return (await coreHTTP.jsonRequestBody<{tag_name: string}[]>(apiReq)).map(({tag_name}) => tag_name);
        }
      },
    ]);
    return {
      type: "github", subType: "release", componentName, enableUpload,
      owner, repository, token,
      tag,
    }
  } else if (srcType === "googleDriver") {
    // Try to pre-fill the oAuth client from a local client_secret*.json in the
    // working directory (Google Cloud console download formats); any read or
    // parse error is deliberately ignored and the fields stay undefined.
    let client_id: string, client_secret: string;
    try {
      const secretFile = (await readdir(process.cwd()).then(files => files.filter(file => file.endsWith(".json") && file.startsWith("client_secret")))).at(0);
      if (secretFile) {
        const cbb = JSON.parse(await readFile(secretFile, "utf8"));
        if (typeof cbb.installed === "object") {
          client_id = cbb.installed.client_id;
          client_secret = cbb.installed.client_secret;
        } else if (typeof cbb.CBe === "object") {
          client_id = cbb.CBe.client_id;
          client_secret = cbb.CBe.client_secret;
        } else if (typeof cbb.client_id === "string" && typeof cbb.client_secret === "string") {
          client_id = cbb.client_id;
          client_secret = cbb.client_secret;
        }
      }
    } catch {}
    const clientPromp = await inquirer.prompt([
      {
        type: "input",
        name: "id",
        message: "Google oAuth Client ID:",
        default: client_id
      },
      {
        type: "input",
        name: "secret",
        message: "Google oAuth Client Secret:",
        default: client_secret,
      },
      {
        type: "confirm",
        name: "enableUpload",
        message: "Enable support to upload files to Google driver?",
        default: false,
      },
      {
        name: "listFiles",
        type: "confirm",
        message: "After authenticating Google Drive, will you want to select the files?"
      },
      {
        when: (ask) => ask["listFiles"],
        name: "folderID",
        type: "input",
        message: "Folder ID?"
      }
    ]);
    let clientToken: any;
    // Loopback HTTP server on a random free port to receive the oAuth redirect.
    const server = createServer();
    const port = await new Promise<number>((resolve, reject) => {
      server.once("error", reject);
      server.listen(0, () => {
        const addr = server.address();
        server.removeListener("error", reject);
        // NOTE(review): if address() returned a string (pipe/unix socket) this
        // Number(...) would be NaN; listen(0) on TCP yields an object with
        // .port, which is the path taken in practice — confirm intended.
        resolve(Number((typeof addr === "string" ? addr : addr?.port) || addr));
      });
    });
    const gdrive = await googleDriver.GoogleDriver({
      authConfig: {
        clientSecret: clientPromp["secret"],
        clientID: clientPromp["id"],
        redirectURL: "http://localhost:" + port,
        authUrlCallback(authUrl, callback) {
          server.once("request", function call(req, res) {
            const { searchParams } = new URL(String(req.url), "http://localhost:"+port);
            if (!searchParams.has("code")) {
              // Wrong request (no ?code=): answer 400 and re-arm the listener
              // so the next request gets another chance.
              res.statusCode = 400;
              res.end("No code");
              server.once("request", call);
              return;
            }
            res.statusCode = 200;
            res.end(searchParams.get("code"));
            callback(searchParams.get("code"))
          });
          console.error("Please open the following URL in your browser:", authUrl);
        },
        tokenCallback(token) {
          // Captured for the returned source config below.
          clientToken = token;
          console.log("Google Drive token:", token);
        },
      }
    });
    server.close();
    // Optionally let the user pick specific .deb files already in the drive.
    let gIDs: string[];
    if (clientPromp["listFiles"]) {
      const folderID = clientPromp["folderID"]||undefined;
      const files = (await gdrive.listFiles(folderID)).filter(file => file.name.endsWith(".deb"));
      if (files.length <= 0) console.log("No files currently in you drive");
      else gIDs = (await inquirer.prompt({
        name: "ids",
        type: "checkbox",
        choices: files.map(file => ({name: file.name, value: file.id, checked: true}))
      })).ids;
    }
    return {
      type: "googleDriver", componentName, enableUpload: clientPromp["enableUpload"],
      clientSecret: clientPromp["secret"],
      clientId: clientPromp["id"],
      clientToken,
      gIDs
    };
  } else if (srcType === "oracleBucket") {
    const ociPromps = await inquirer.prompt([
      {
        name: "namespace",
        type: "input",
        message: "OCI Bucket namespace:"
      },
      {
        name: "name",
        type: "input",
        message: "Bucket name:"
      },
      {
        name: "region",
        type: "list",
        message: "Select Bucket region:",
        choices: [
          "af-johannesburg-1",
          "ap-chuncheon-1",
          "ap-hyderabad-1",
          "ap-melbourne-1",
          "ap-mumbai-1",
          "ap-osaka-1",
          "ap-seoul-1",
          "ap-singapore-1",
          "ap-sydney-1",
          "ap-tokyo-1",
          "ca-montreal-1",
          "ca-toronto-1",
          "eu-amsterdam-1",
          "eu-frankfurt-1",
          "eu-madrid-1",
          "eu-marseille-1",
          "eu-milan-1",
          "eu-paris-1",
          "eu-stockholm-1",
          "eu-zurich-1",
          "il-jerusalem-1",
          "me-abudhabi-1",
          "me-jeddah-1",
          "mx-queretaro-1",
          "sa-santiago-1",
          "sa-saopaulo-1",
          "sa-vinhedo-1",
          "uk-cardiff-1",
          "uk-london-1",
          "us-ashburn-1",
          "us-chicago-1",
          "us-phoenix-1",
          "us-sanjose-1"
        ]
      },
      {
        name: "authType",
        type: "list",
        choices: [
          {name: "OCI Cli config", value: "preAuthentication"},
          {name: "User", value: "user"},
        ]
      },
      // The four credential fields below are only asked for "User" auth.
      {
        when: (answers) => answers["authType"] !== "preAuthentication",
        name: "tenancy",
        type: "input"
      },
      {
        when: (answers) => answers["authType"] !== "preAuthentication",
        name: "user",
        type: "input"
      },
      {
        when: (answers) => answers["authType"] !== "preAuthentication",
        name: "fingerprint",
        type: "input"
      },
      {
        when: (answers) => answers["authType"] !== "preAuthentication",
        name: "privateKey",
        type: "input"
      },
      {
        name: "passphase",
        type: "confirm",
        message: "Private key require password to decrypt?",
        when: (answers) => answers["authType"] !== "preAuthentication"
      },
      {
        // NOTE(review): this prompt reuses the name "passphase", so the confirm
        // answer above is overwritten by the typed password when confirmed; when
        // the confirm is declined the boolean `false` itself ends up in the auth
        // config below — verify this is the intended stored shape.
        when: (answers) => answers["passphase"],
        name: "passphase",
        type: "password",
        mask: "*"
      },
      {
        type: "confirm",
        name: "enableUpload",
        message: "Enable support to upload files?",
        default: false,
      }
    ]);
    const { namespace, name, region, enableUpload } = ociPromps;
    // "OCI Cli config" mode needs no inline credentials.
    if (ociPromps["authType"] === "preAuthentication") return {
      type: "oracleBucket", componentName, enableUpload,
      authConfig: {
        namespace, name, region
      }
    };
    const { fingerprint, privateKey, tenancy, user, passphase } = ociPromps;
    return {
      type: "oracleBucket", componentName, enableUpload,
      authConfig: {
        namespace, name, region,
        auth: {
          fingerprint, privateKey, tenancy, user, passphase
        }
      }
    };
  } else if (srcType === "docker") {
    const basicConfig = await inquirer.prompt<{authConfirm: boolean, imageURI: string}>([
      {
        name: "imageURI",
        type: "input",
        message: "Image URI/URL:",
        validate(input) {
          // Accept only URIs the registry client can parse.
          try {
            new dockerRegistry.parseImage(input);
            return true;
          } catch (err) {
            return String(err?.message || err);
          }
        },
      },
      {
        name: "authConfirm",
        type: "confirm",
        message: "This registry or image required authentication?",
        default: false
      }
    ]);
    // Credentials (and the upload opt-in) are only collected when the
    // registry/image requires authentication; otherwise auth stays undefined.
    let auth: dockerRegistry.userAuth;
    let enableUpload: boolean = false;
    if (basicConfig.authConfirm) {
      const authPrompts = await inquirer.prompt([
        {
          name: "user",
          type: "input",
          message: "Username:",
          validate(input: string) {
            if (input.trim().length > 1) return true;
            return "Invalid username";
          }
        },
        {
          name: "pass",
          type: "password",
          mask: "*",
          message: "Password or Token:"
        },
        {
          name: "enableUpload",
          type: "confirm",
          message: "Allow publish packages in Docker registry?"
        }
      ]);
      enableUpload = authPrompts["enableUpload"];
      auth = {
        username: authPrompts.user,
        password: authPrompts.pass
      };
    }
    return {
      type: "docker", componentName, enableUpload,
      image: basicConfig.imageURI,
      auth
    };
  }
  // Unreachable with the list prompt above, but restart on an unknown type.
  console.log("Invalid select type!");
  return createSource();
}

@ -1,445 +1,17 @@
#!/usr/bin/env node
import "./log.js";
import path from "node:path";
import yargs from "yargs";
import crypto from "node:crypto";
import cluster from "node:cluster";
import packages from "./packages.js";
import express from "express";
import expressRate from "express-rate-limit";
import streamPromise from "node:stream/promises";
import configManeger from "./configManeger.js";
import * as Debian from "@sirherobrine23/dpkg";
import oldFs, { createReadStream, promises as fs } from "node:fs";
import { aptStreamConfig } from "./config.js";
import { dockerRegistry } from "@sirherobrine23/docker-registry";
import { extendsFS } from "@sirherobrine23/extends";
import { dpkg } from "@sirherobrine23/dpkg";
// Set yargs config
// Width used to wrap yargs help output; null (no wrap) when stdout has no
// getWindowSize (e.g. output is piped rather than a TTY).
const terminalSize = typeof process.stdout.getWindowSize === "function" ? process.stdout.getWindowSize()[0] : null;
yargs(process.argv.slice(2)).wrap(terminalSize).version(false).help(true).alias("h", "help").strictCommands().demandCommand()
// Edit/print configs interactive mode
.command(["config", "maneger", "$0"], "Maneger config", yargs => yargs.option("config", {
string: true,
alias: "c",
type: "string",
description: "Config file path",
default: "aptStream.yml",
}).option("print", {
description: "print config in stdout and select targets to print. Default is yaml",
alias: "p",
array: false,
string: true,
choices: [
"", // if set only "--print"
"yaml", "yml", "json", // without encode
"yaml64", "yml64", "json64", // Encode in base64
"yamlhex", "ymlhex", "jsonhex", // encode in hexadecimal (hex)
],
}), async options => {
if (options.print !== undefined) {
let out = String(options.print);
if (typeof options.print === "boolean"||options.print === "") out = "yaml";
const config = new aptStreamConfig(options.config);
const target = out.startsWith("json") ? "json" : "yaml", encode = out.endsWith("64") ? "base64" : out.endsWith("hex") ? "hex" : "utf8";
return console.log((config.toString(encode, target)));
}
if (!process.stdin.isTTY) throw new Error("Run with TTY to maneger config!");
return configManeger(options.config);
})
// Sync repository packages
.command(["sync", "synchronize"], "Sync packges directly from CLI", yargs => yargs.option("config", {
string: true,
alias: "c",
type: "string",
description: "Config file path",
default: "aptStream.yml",
}).option("verbose", {
type: "boolean",
boolean: true,
description: "Enable verbose errors",
default: false,
alias: ["v", "vv", "dd"]
}), async options => {
console.log("Starting...");
const packageManeger = await packages(options.config);
let i = 0;
await packageManeger.syncRepositorys((err, db) => {
process.stdout.moveCursor(0, -1);
console.log("Packages loaded %f", i++);
if (!!err) {
if (options.verbose) return console.error(err);
return console.error(err.message || err);
}
console.log("Added %s: %s/%s (%s)", db.repositoryID, db.controlFile.Package, db.controlFile.Architecture, db.controlFile.Version);
});
console.log("End!");
return packageManeger.close();
})
// Pack debian package
.command(["pack", "pack-deb", "create", "c"], "Create package", yargs => yargs.option("package-path", {
type: "string",
string: true,
alias: "s",
default: process.cwd(),
description: "Debian package source",
}).option("output", {
type: "string",
string: true,
alias: "o",
}).option("compress", {
type: "string",
string: true,
alias: [
"data-compress",
"c"
],
description: "data.tar compress file",
default: "gzip",
choices: [
"passThrough",
"gzip",
"zst",
"xz",
]
}).option("control-compress", {
type: "string",
string: true,
description: "control.tar compress file",
alias: [
"d"
],
default: "gzip",
choices: [
"gzip",
"passThrough",
"xz"
]
}), async options => {
let debianConfig: string;
if (!(await extendsFS.exists(debianConfig = path.resolve(process.cwd(), options.packagePath, "DEBIAN"))||await extendsFS.exists(debianConfig = path.resolve(process.cwd(), options.packagePath, "debian")))) throw new Error("Create valid package Structure!");
if (!(await extendsFS.exists(path.join(debianConfig, "control")))) throw new Error("Require control file");
const control = dpkg.parseControl(await fs.readFile(path.join(debianConfig, "control")));
if (!options.output) options.output = path.join(process.cwd(), `${control.Package}_${control.Architecture}_${control.Version}.deb`); else options.output = path.resolve(process.cwd(), options.output);
const scriptsFile = (await fs.readdir(debianConfig)).filter(file => (["preinst", "prerm", "postinst", "postrm"]).includes(file));
console.log("Creating debian package");
await streamPromise.finished(dpkg.createPackage({
control,
dataFolder: path.resolve(debianConfig, ".."),
compress: {
data: options.compress as any||"gzip",
control: options.controlCompress as any||"gzip",
},
scripts: scriptsFile.reduce<dpkg.packageConfig["scripts"]>((acc, file) => {acc[file] = path.join(debianConfig, file); return acc;}, {})
}).pipe(oldFs.createWriteStream(options.output)));
console.log("File saved %O", options.output);
})
// Upload to registry
.command(["upload", "u"], "Upload package to repoitory allow uploads", yargs => yargs.strictCommands(false).option("config", {
string: true,
alias: "c",
type: "string",
description: "Config file path",
default: "aptStream.yml",
}).option("repositoryID", {
type: "string",
string: true,
alias: ["repoID", "id", "i"],
demandOption: true,
description: "Repository to upload files"
}).option("tag", {
type: "string",
string: true,
description: "Docker/Github release tag name",
alias: ["dockerTAG", "ociTAG", "oci_tag", "release_tag"]
}), async options => {
const files = options._.slice(1).map((file: string) => path.resolve(process.cwd(), file));
if (!files.length) throw new Error("Required one file to Upload");
const config = new aptStreamConfig(options.config);
if (!(config.getRepository(options.repositoryID).get(options.repositoryID)).enableUpload) throw new Error("Repository not support upload file!");
const up = await config.getRepository(options.repositoryID).uploadFile(options.repositoryID);
if (up.githubUpload) {
for (const filePath of files) {
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
const stats = await fs.lstat(filePath);
const filename = path.basename(filePath);
await streamPromise.finished(createReadStream(filePath).pipe(await up.githubUpload(filename, stats.size, options.tag)));
}
} else if (up.gdriveUpload) {
for (const filePath of files) {
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
const filename = path.basename(filePath);
await streamPromise.finished(createReadStream(filePath).pipe(await up.gdriveUpload(filename)));
}
} else if (up.ociUpload) {
for (const filePath of files) {
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
const filename = path.basename(filePath);
await streamPromise.finished(createReadStream(filePath).pipe(await up.ociUpload(filename)));
}
} else if (up.dockerUpload) {
for (const filePath of files) {
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
const { controlFile } = await dpkg.parsePackage(createReadStream(filePath));
const filename = path.basename(filePath);
const tr = await up.dockerUpload(dockerRegistry.debianArchToDockerPlatform(controlFile.Architecture));
tr.annotations.set("org.opencontainers.image.description", controlFile.Description);
tr.annotations.set("org.opencontainers.image.version", controlFile.Version);
tr.annotations.set("org.sirherobrine23.aptstream.control", JSON.stringify(controlFile));
tr.annotations.set("com.github.package.type", "aptstream_package");
await streamPromise.finished(createReadStream(filePath).pipe(tr.addEntry({
name: filename,
type: "file",
size: (await fs.lstat(filePath)).size
})));
const img_info = await tr.finalize(options.tag||controlFile.Version);
console.log("Image digest: %O", img_info.digest);
}
}
await config.saveConfig().catch(() => {});
})
// APT Server
.command(["server", "serve", "s"], "Run http Server", yargs => yargs.option("config", {
string: true,
alias: "c",
type: "string",
description: "Config file path",
default: "aptStream.yml"
}).option("port", {
number: true,
alias: "p",
type: "number",
description: "Alternative port to Run http server"
}).option("cluster", {
number: true,
type: "number",
description: "Enable cluster mode for perfomace",
alias: "t"
}).option("data", {
string: true,
alias: "C",
type: "string",
description: "data files"
}).option("db", {
string: true,
type: "string",
alias: "d",
description: "database url"
}).option("auto-sync", {
type: "boolean",
boolean: true,
alias: "z",
default: false,
description: "Enable backgroud sync packages"
}).option("disable-release-compress", {
type: "boolean",
boolean: true,
default: false,
description: "Disable Release generate Packages.gz and Packages.gz to calculate hash",
alias: "L"
}), async options => {
let packageManegerInit = new aptStreamConfig(options.config);
if (!!options.data) packageManegerInit.setDataStorage(options.data);
if (!!options.port) packageManegerInit.setPortListen(options.port);
if (!!options.db) packageManegerInit.setDatabse(options.db);
if (!!options["disable-release-compress"]) packageManegerInit.setCompressRelease("gzip", false).setCompressRelease("xz", false);
if (!!options.cluster && options.cluster > 0) packageManegerInit.setClusterForks(options.cluster);
const packageManeger = await packages(packageManegerInit);
let forks = packageManeger.getClusterForks();
if (cluster.isPrimary) {
if (!!(options.autoSync ?? options["auto-sync"])) (async () => {
while (true) {
console.info("Initing package sync!");
await packageManeger.syncRepositorys((_err, db) => {
if (!db) return;
const {repositoryID, controlFile: { Package, Architecture, Version }} = db;
console.log("Sync/Add: %s -> %s %s/%s (%s)", repositoryID, Package, Architecture, Version)
});
console.log("Next sync after 30 Minutes");
await new Promise(done => setTimeout(done, 1800000));
}
})().catch(err => {
console.info("Auto sync packages disabled!");
console.error(err);
});
if (forks > 0) {
const forkProcess = async (count = 0): Promise<number> => new Promise((done, reject) => {
const fk = cluster.fork();
return fk.on("error", err => {
console.error(err);
return reject(err);
}).on("online", () => done(fk.id)).once("exit", (code, signal) => {
count++;
if (!signal && code === 0) return console.info("Cluster %s: exited and not restarting", fk.id);
else if (count > 5) return console.warn("Cluster get max count retrys!");
console.info("Cluster %s: Catch %O, and restating with this is restating count %f", fk.id, code||signal, count);
return forkProcess(count);
});
});
for (let i = 0; i < forks; i++) await forkProcess().then(id => console.info("Cluster %s is online", id));
return
}
}
// Serve
const app = express();
app.disable("x-powered-by").disable("etag");
app.use(express.json(), (_req, res, next) => {
res.setHeader("cluster-id", String(cluster.isPrimary ? 1 : cluster.worker.id));
res.json = (body) => res.setHeader("Content-Type", "application/json").send(JSON.stringify(body, null, 2)); return next();
});
// Serve info
app.get("/", async ({res}) => {
return res.json({
cluster: cluster.worker?.id ?? 1,
sourcesCount: packageManeger.getRepositorys().length,
packagesRegistred: await packageManeger.packagesCount(),
db: packageManeger.getClientInfo(),
});
});
// Public key
app.get("/public(_key|)(|.gpg|.dearmor)", async (req, res) => res.setHeader("Content-Type", req.path.endsWith(".dearmor") ? "octect/stream" : "text/plain").send(await packageManeger.getPublicKey(req.path.endsWith(".dearmor") ? "dearmor" : "armor")));
// Get dists
app.get("/dists", async ({res}) => res.json(Array.from(new Set(packageManeger.getRepositorys().map(d => d.repositoryName)))));
app.get("/dists/:distName/info", async (req, res) => res.json(await packageManeger.repoInfo(req.params.distName)));
app.get("/dists/(:distName)(|/InRelease|/Release(.gpg)?)?", async (req, res) => {
const lowerPath = req.path.toLowerCase(), aptRoot = path.posix.resolve("/", path.posix.join(req.baseUrl, req.path), "../../../..");
let Release = await packageManeger.createRelease(req.params["distName"], aptRoot);
let releaseText: string;
if (lowerPath.endsWith("inrelease")||lowerPath.endsWith("release.gpg")) releaseText = await Release.inRelease(req.path.endsWith(".gpg") ? "clearMessage" : "sign");
else if (lowerPath.endsWith("release")) releaseText = Release.toString();
else return res.json(Release.toJSON());
return res.status(200).setHeader("Content-Type", "text/plain").setHeader("Content-Length", String(Buffer.byteLength(releaseText))).send(releaseText);
});
app.get("/dists/:distName/:componentName/binary-:Arch/Packages(.(gz|xz))?", async (req, res) => {
const { distName, componentName, Arch } = req.params;
const reqPath = req.path;
return packageManeger.createPackage(distName, componentName, Arch, path.posix.resolve("/", path.posix.join(req.baseUrl, req.path), "../../../../../.."), {
compress: reqPath.endsWith(".gz") ? "gz" : reqPath.endsWith(".xz") ? "xz" : undefined,
callback: (str) => str.pipe(res.writeHead(200, {}))
});
});
// Send package hashs
app.get("/pool", async ({res}) => res.json(await packageManeger.getPackagesHash()));
app.get("/pool/(:hash)(|/data.tar|.deb)", async (req, res) => {
const packageID = (await packageManeger.pkgQuery({"controlFile.MD5sum": req.params.hash})).at(0);
if (!packageID) return res.status(404).json({error: "Package not exist"});
if (req.path.endsWith("/data.tar")||req.path.endsWith(".deb")) {
const str = await packageManeger.getPackageStream(packageID);
if (req.path.endsWith(".deb")) return str.pipe(res.writeHead(200, {}));
return (await Debian.getPackageData(str)).pipe(res.writeHead(200, {}));
}
return res.json({...packageID.controlFile, Filename: undefined});
});
// Upload file
const uploadIDs = new Map<string, {createAt: Date, deleteAt: Date, uploading: boolean, repositoryID: string, filename: string}>();
const uploadRoute = express.Router();
app.use("/upload", uploadRoute);
uploadRoute.get("/", ({res}) => res.json({available: true}));
uploadRoute.use(expressRate({
skipSuccessfulRequests: true,
windowMs: 1000 * 60 * 40,
max: 1000,
})).post("/", async ({body, headers: { authorization }}, res) => {
if (!authorization) return res.status(401).json({error: "Require authorization/Authorization header"});
else if (!(authorization.startsWith("Bearer "))) return res.status(401).json({error: "Invalid authorization schema"});
else if (!(await packageManeger.userAs(authorization.replace("Bearer", "").trim()))) return res.status(401).json({error: "Invalid token!"});
if (!body) return res.status(400).json({error: "Required JSON or YAML to set up upload"});
const { repositoryID, control } = body as {repositoryID: string, control: Debian.debianControl};
if (!repositoryID) return res.status(400).json({error: "Required repository ID"});
if (!control) return res.status(400).json({error: "Required debian control JSON"});
const repo = packageManeger.getRepository(repositoryID).get(repositoryID);
if (!repo.enableUpload) return res.status(401).json({message: "This repository not support upload or not setup to Upload files!"});
let reqID: string;
while (true) if (!(uploadIDs.has(reqID = crypto.randomBytes(12).toString("hex")))) break;
const { Package: packageName, Architecture, Version } = control;
const createAt = new Date(), deleteAt = new Date(createAt.getTime() + (1000 * 60 * 5));
setTimeout(() => {if (uploadIDs.has(reqID)) uploadIDs.delete(reqID);}, createAt.getTime() - deleteAt.getTime())
uploadIDs.set(reqID, {
createAt, deleteAt,
repositoryID,
uploading: false,
filename: `${packageName}_${Architecture}_${Version}.deb`,
});
return res.status(201).json({
repositoryType: repo.type,
uploadID: reqID,
config: uploadIDs.get(reqID),
});
}).put("/:uploadID", async (req, res) => {
if (!(uploadIDs.has(req.params.uploadID))) return res.status(401).json({error: "Create uploadID fist!"});
if (uploadIDs.get(req.params.uploadID).uploading) return res.status(401).json({error: "Create new uploadID, this in use"});
else if (!(req.headers["content-type"].includes("application/octet-stream"))) return res.status(400).json({error: "Send octet stream file"});
else if (!(req.headers["content-length"])) return res.status(422).json({error: "Required file size"});
else if (Number(req.headers["content-length"]) < 10) return res.status(422).json({error: "The file too small!"});
uploadIDs.get(req.params.uploadID).uploading = true;
let { repositoryID, filename } = uploadIDs.get(req.params.uploadID);
try {
const up = await packageManeger.getRepository(repositoryID).uploadFile(repositoryID);
const tagName = (Array.isArray(req.query.tagName) ? req.query.tagName.at(0).toString() : req.query.tagName.toString());
if (up.githubUpload) {
if (!tagName) res.setHeader("warning", "Using latest github release tag!");
await streamPromise.finished(req.pipe(await up.githubUpload(filename, Number(req.headers["content-length"]), tagName)));
return res.status(201).json({
type: "Github release"
});
} else if (up.gdriveUpload) {
const id = (Array.isArray(req.query.id) ? req.query.id.at(0).toString() : req.query.id.toString());
await streamPromise.finished(req.pipe(await up.gdriveUpload(filename, id)));
return res.status(201).json({
type: "Google driver"
});
} else if (up.ociUpload) {
if (typeof req.query.path === "string") filename = path.posix.resolve("/", req.query.path, filename);
await streamPromise.finished(req.pipe(await up.ociUpload(filename)));
return res.status(201).json({
type: "Oracle cloud bucket",
filename
});
} else if (up.dockerUpload) {
const tar = await up.dockerUpload({
os: "linux",
architecture: req.query.arch||"generic" as any,
});
await streamPromise.finished(req.pipe(tar.addEntry({name: filename, size: Number(req.headers["content-length"])})));
return res.status(201).json({
type: "Oracle cloud bucket",
image: await tar.finalize(tagName),
});
}
return res.status(502).json({
message: "Sorry, our error was caught"
});
} finally {
uploadIDs.delete(req.params.uploadID);
}
});
app.all("*", ({res}) => res.status(404).json({message: "Page not exists"}));
app.use((err, _req, res, _next) => {
console.error(err);
return res.status(400).json({error: err?.message || String(err)});
}).listen(packageManeger.getPortListen(), function () {
const address = this.address();
console.log("Port Listen on %O", typeof address === "object" ? address.port : address);
});
}).parseAsync().catch(err => {
console.error(err);
process.exit(-1);
});
#!/usr/bin/env node
import { Config, Connect, Source, generateGPG } from "./config.js";
import { createRoute } from "./server.js";
// CLI parsing (yargs) is currently disabled in this entry point.
// Bootstrap sequence: build a fresh config with a newly generated GPG key
// pair, register a default "google" source, open the database connection,
// then start the HTTP server.
const config = new Config();
const { publicKey, privateKey } = await generateGPG();
config.publicGPG = publicKey;
config.privateGPG = privateKey;
config.set("google", new Source());
const connection = await Connect(config);
const server = await createRoute(connection);
server.listen(3000, () => console.log("Listen on 3000"));

38
src/localFile.ts Normal file

@ -0,0 +1,38 @@
import fs from "node:fs/promises";
import { createWriteStream, createReadStream } from "node:fs";
import { Connection } from "./config.js";
import { debianControl, dpkg } from "@sirherobrine23/dpkg";
import path from "node:path";
import { extendsFS } from "@sirherobrine23/extends";
import { finished } from "node:stream/promises";
import { compressStream } from "@sirherobrine23/decompress";
/**
 * Write the Debian "Packages" index (plus .gz and .xz variants) for every
 * component/architecture pair of the given repository, under
 * `<tmpFolder>/dists/<repository>/<component>/<arch>/packages`.
 *
 * @param db - Open database/config connection (provides repoConfig and packageCollection).
 * @param repository - Distribution (repository) name to index.
 */
export async function createPackage(db: Connection, repository: string) {
  const repo = db.repoConfig.get(repository);
  // NOTE(review): `$and` over exact-match `{repositorys: [i]}` clauses only
  // matches documents whose "repositorys" array equals every single-element
  // array — verify against the collection schema; an `$or` (or a
  // "repositorys.repository" match) may be what is intended.
  const packageArray = await db.packageCollection.find({ $and: Array.from(repo.keys()).map(i => ({ repositorys: [i] })) }).toArray();
  // Group control files as componentName -> Architecture -> debianControl[].
  const grouped = packageArray.reduce<{ [k: string]: { [c: string]: debianControl[] } }>((acc, pkg) => {
    pkg.repositorys.filter(rel => rel.repository === repository).forEach(rel => {
      // "origim" is the field name as stored in the database (sic).
      const { componentName } = repo.get(rel.origim);
      acc[componentName] ??= {};
      acc[componentName][pkg.control.Architecture] ??= [];
      acc[componentName][pkg.control.Architecture].push(pkg.control);
    });
    return acc;
  }, {});
  const repositoryRoot = path.join(db.repoConfig.tmpFolder, "dists", repository);
  for (const [componentName, archs] of Object.entries(grouped)) {
    for (const [arch, controls] of Object.entries(archs)) {
      const packagesDir = path.join(repositoryRoot, componentName, arch);
      if (!(await extendsFS.exists(packagesDir))) await fs.mkdir(packagesDir, { recursive: true });
      const file = path.join(packagesDir, "packages");
      const wr = createWriteStream(file);
      for (let index = 0; index < controls.length; index++) {
        // Debian "Packages" paragraphs must be separated by a blank line
        // ("\n\n"), matching the parser that splits on consecutive 0x0A bytes.
        if (index > 0) wr.write(Buffer.from("\n\n"));
        await new Promise<void>((done, reject) => wr.write(dpkg.createControl(controls[index]), err => err ? reject(err) : done()));
      }
      wr.end(); // flush buffered data and close (close() may destroy before 'finish')
      await finished(wr);
      // Emit compressed copies alongside the plain index.
      await finished(createReadStream(file).pipe(compressStream("gzip")).pipe(createWriteStream(file + ".gz")));
      await finished(createReadStream(file).pipe(compressStream("xz")).pipe(createWriteStream(file + ".xz")));
    }
  }
}

@ -1,43 +0,0 @@
import { formatWithOptions, InspectOptions } from "node:util";
import cluster from "node:cluster";
import expressLayer from "express/lib/router/layer.js";
// Patch promise handler to express 4.x
expressLayer.prototype.handle_request = async function handle_request_promised(...args) {
var fn = this.handle;
if (fn.length > 3) return args.at(-1)();
await Promise.resolve().then(() => fn.call(this, ...args)).catch(args.at(-1));
}
// Set default custom log to Cluster workers
if (cluster.isWorker) {
const { log, error, debug, info, warn } = console;
const { id } = cluster.worker ?? {};
const defaultOptions: InspectOptions = {
colors: true,
showHidden: false,
depth: null
};
console.clear = console.clear ?? function () {console.warn("cannot clear tty");}
console.log = function(...args) {
log("[LOG%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
}
console.error = function(...args) {
error("[ERROR%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
}
console.debug = function(...args) {
debug("[DEBUG%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
}
console.info = function(...args) {
info("[INFO%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
}
console.warn = function(...args) {
warn("[WARNING%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
}
}

@ -1,462 +0,0 @@
import { aptStreamConfig, configJSON, repositorySource } from "./config.js";
import { decompressStream, compressStream } from "@sirherobrine23/decompress";
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";
import { extendsCrypto, extendsFS } from "@sirherobrine23/extends";
import { apt, dpkg } from "@sirherobrine23/dpkg";
import { tmpdir } from "node:os";
import { format } from "node:util";
import oldFs, { promises as fs } from "node:fs";
import coreHTTP, { Github } from "@sirherobrine23/http";
import streamPromise, { finished } from "node:stream/promises";
import dockerRegistry from "@sirherobrine23/docker-registry";
import mongoDB from "mongodb";
import openpgp from "openpgp";
import stream from "node:stream";
import crypto from "node:crypto";
import path from "node:path";
export interface dbStorage {
repositoryID: string;
restoreFile: any;
controlFile: dpkg.debianControl;
}
export interface userAuth {
createAt: Date;
username: string;
token: string[];
}
export default async function main(initConfig: string|configJSON|aptStreamConfig) {
return new Promise<packageManeger>((done, reject) => {
const pkg = new packageManeger(initConfig, (err) => {
if (err) return reject(err);
return done(pkg);
});
});
}
export class packageManeger extends aptStreamConfig {
#client: mongoDB.MongoClient;
#collection: mongoDB.Collection<dbStorage>;
#authCollection: mongoDB.Collection<userAuth>;
async close() {this.#client.close()}
constructor(initConfig: string|configJSON|aptStreamConfig, connectionCallback?: (err?: any) => void) {
connectionCallback ||= (err) => {if(err) process.emit("warning", err);}
super(initConfig);
(async () => {
const database = this.getDatabase();
const mongoClient = this.#client = await (new mongoDB.MongoClient(database.url)).connect();
mongoClient.on("error", err => console.error(err));
this.#authCollection = mongoClient.db(database.databaseName || "aptStream").collection<userAuth>("auth");
this.#collection = mongoClient.db(database.databaseName || "aptStream").collection<dbStorage>("packages");
})().then(() => connectionCallback(), err => connectionCallback(err));
}
getClientInfo() {
const connection = this.#client["topology"];
return {
connections: {
max: Number(connection.s.options.maxConnecting),
current: Number(connection.client.s.activeSessions?.size),
}
}
}
async createToken(username: string) {
let token: string;
while (true) {
token = crypto.randomBytes(8).toString("hex");
if (!(await this.#authCollection.findOne({token}))) break;
}
if (!(await this.#authCollection.findOne({username}))) await this.#authCollection.insertOne({username, createAt: new Date(), token: []});
await this.#authCollection.findOneAndUpdate({username}, {$inc: {token: token as never}});
return token;
}
async userAs(token: string) {
return !!(await this.#authCollection.findOne({token: [String(token)]}));
}
async pkgQuery(query: mongoDB.Filter<dbStorage>) {
return this.#collection.find(query).toArray();
}
async packagesCount() {
return (await this.#collection.stats()).count;
}
async getPackagesHash() {
return this.#collection.distinct("controlFile.MD5sum");
}
async repoInfo(repositoryName: string) {
const repositorys = this.getRepository(repositoryName).getAllRepositorys();
if (!repositorys.length) throw new Error("Repository or Component name not exists!");
return {
packagesCount: (await Promise.all(repositorys.map(async ({repositoryID}) => this.#collection.countDocuments({repositoryID})))).reduce((acc, count) => acc+count, 0),
sources: repositorys.length,
};
}
async createPackage(repositoryName: string, componentName: string, Arch: string, appRoot: string = "", options?: {compress?: "gz"|"xz", callback: (str: stream.Readable) => void}): Promise<{filePath: string; fileSize: number; sha512: string; sha256: string; sha1: string; md5: string;}[]> {
const repositorys = this.getRepository(repositoryName).getAllRepositorys().filter(pkg => pkg.componentName === componentName);
if (!repositorys.length) throw new Error("Repository or Component name not exists!");
const str = new stream.Readable({autoDestroy: true, emitClose: true, read(_s){}});
const gg: (Promise<{filePath: string; fileSize: number; sha512: string; sha256: string; sha1: string; md5: string;}>)[] = [];
if (typeof options?.callback === "function") (async () => options.callback(str.pipe(compressStream(options.compress === "gz" ? "gzip" : options.compress === "xz" ? "xz" : "passThrough"))))().catch(err => str.emit("error", err));
else {
async function getHash(compress?: "gz"|"xz") {
const com = stream.Readable.from(str.pipe(compressStream(compress === "gz" ? "gzip" : compress === "xz" ? "xz" : "passThrough")));
return extendsCrypto.createHashAsync(com).then(({hash, byteLength}) => ({
filePath: path.posix.join(componentName, "binary-"+Arch, "Packages"+(compress === "gz" ? ".gz" : compress === "xz" ? ".xz" : "")),
fileSize: byteLength,
sha512: hash.sha512,
sha256: hash.sha256,
sha1: hash.sha1,
md5: hash.md5,
}));
}
gg.push(getHash());
if (this.getCompressRelease("gzip")) gg.push(getHash("gz"));
if (this.getCompressRelease("xz")) gg.push(getHash("xz"));
}
(async () => {
let breakLine = false;
for (const repo of repositorys) {
let pkgs: mongoDB.WithId<dbStorage>[], page = 0;
while ((pkgs = await this.#collection.find({repositoryID: repo.repositoryID, "controlFile.Architecture": Arch}).skip(page).limit(2500).toArray()).length > 0) {
page += pkgs.length;
for (const {controlFile: pkg} of pkgs) {
let pkgHash: string;
if (!(pkgHash = pkg.MD5sum)) continue;
if (breakLine) str.push("\n\n"); else breakLine = true;
str.push(dpkg.createControl({
...pkg,
Filename: path.posix.join("/", appRoot, "pool", `${pkgHash}.deb`).slice(1),
}));
}
}
}
str.push(null);
})().catch(err => str.emit("error", err));
return Promise.all(gg);
}
async createRelease(repositoryName: string, appRoot: string) {
const source = this.getRepository(repositoryName);
const repositorys = source.getAllRepositorys();
const releaseDate = (new Date()).toUTCString();
const Architectures = await this.#collection.distinct("controlFile.Architecture", {repositoryID: {$in: repositorys.map(a => a.repositoryID)}});
const Components = Array.from(new Set(repositorys.map(rpm => rpm.componentName)));
const MD5Sum = new Set<{hash: string, size: number, path: string}>();
const SHA1 = new Set<{hash: string, size: number, path: string}>();
const SHA256 = new Set<{hash: string, size: number, path: string}>();
const SHA512 = new Set<{hash: string, size: number, path: string}>();
await Promise.all(Architectures.map(async arch => Promise.all(Components.map(async comp => this.createPackage(repositoryName, comp, arch, appRoot).then(res => res.forEach(({fileSize, filePath, md5, sha1, sha256, sha512}) => {
MD5Sum.add({size: fileSize, path: filePath, hash: md5});
SHA1.add({size: fileSize, path: filePath, hash: sha1});
SHA256.add({size: fileSize, path: filePath, hash: sha256});
SHA512.add({size: fileSize, path: filePath, hash: sha512});
}), err => console.log(err))))));
const toJSON = () => {
if ((!Architectures.length) && (!Components.length)) throw new Error("Invalid config repository or not loaded to database!");
const data = {
Date: releaseDate,
acquireByHash: false,
Codename: source.getCodename(),
Suite: source.getSuite(),
Origin: source.getOrigin(),
Label: source.getLabel(),
Description: source.getDescription(),
Architectures,
Components,
MD5Sum: Array.from(MD5Sum.values()).sort((a, b) => b.size - a.size),
SHA1: Array.from(SHA1.values()).sort((a, b) => b.size - a.size),
SHA256: Array.from(SHA256.values()).sort((a, b) => b.size - a.size),
SHA512: Array.from(SHA512.values()).sort((a, b) => b.size - a.size),
};
if (!data.Architectures.length) throw new Error("Require one packages loaded to database!");
return data;
}
const toString = () => {
const reljson = toJSON();
let configString: string[] = [
"Date: "+(reljson.Date),
"Acquire-By-Hash: no",
"Architectures: "+(reljson.Architectures.join(" ")),
"Components: "+(reljson.Components.join(" ")),
];
if (reljson.Codename) configString.push(`Codename: ${reljson.Codename}`);
if (reljson.Suite) configString.push(`Suite: ${reljson.Suite}`);
if (reljson.Origin) configString.push(`Origin: ${reljson.Origin}`);
if (reljson.Label) configString.push(`Label: ${reljson.Label}`);
if (reljson.Description) configString.push(`Description: ${reljson.Description}`);
const insertHash = (name: string, hashes: typeof reljson.MD5Sum) => {
configString.push(name+":");
const sizeLength = hashes.at(0).size.toString().length+2;
for (const data of hashes) configString.push((" "+data.hash + " "+(Array(Math.max(1, Math.abs(sizeLength - (data.size.toString().length)))).fill("").join(" ")+(data.size.toString()))+" "+data.path))
}
if (reljson.MD5Sum.length > 0) insertHash("MD5Sum", reljson.MD5Sum);
if (reljson.SHA1.length > 0) insertHash("SHA1", reljson.SHA1);
if (reljson.SHA256.length > 0) insertHash("SHA256", reljson.SHA256);
if (reljson.SHA512.length > 0) insertHash("SHA512", reljson.SHA512);
return configString.join("\n");
}
const inRelease = async (type: "sign"|"clearMessage" = "sign"): Promise<string> => {
if (!(source.getCodename()||source.getSuite())) throw new Error("Required Suite or Codename to create InRelease file");
else if (!(MD5Sum.size||SHA256.size)) throw new Error("Require MD5 or SHA256 to create InRelease file");
const gpgSign = this.getPGPKey();
const privateKey = gpgSign.gpgPassphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: gpgSign.privateKey.keyContent }), passphrase: gpgSign.gpgPassphrase}) : await openpgp.readPrivateKey({ armoredKey: gpgSign.privateKey.keyContent });
const text = toString();
if (type === "clearMessage") return Buffer.from(await openpgp.sign({
signingKeys: privateKey,
format: "armored",
message: await openpgp.createMessage({text})
}) as any).toString("utf8");
return openpgp.sign({
signingKeys: privateKey,
format: "armored",
message: await openpgp.createCleartextMessage({text})
});
}
return {
toJSON,
toString,
inRelease
}
}
async getPackageStream(packageTarget: dbStorage) {
const source = this.getRepository(packageTarget.repositoryID).get(packageTarget.repositoryID);
if (!source) throw new Error("Package Source no more avaible please sync packages!");
let saveCache: string;
if (await this.getDataStorage()) {
const cacheFolder = path.join(await this.getDataStorage(), "deb_cache");
if (!(await extendsFS.exists(cacheFolder))) await fs.mkdir(cacheFolder, {recursive: true});
const { MD5sum, SHA1, SHA256, SHA512 } = packageTarget.controlFile;
for (const hash of ([MD5sum, SHA1, SHA256, SHA512])) {
if (!hash) continue
const filePath = path.join(cacheFolder, `${hash}.deb`);
if (await extendsFS.exists(filePath)) return oldFs.createReadStream(filePath);
else if (!saveCache) saveCache = filePath;
}
}
if (source.type === "http") {
const { url, auth: { header: headers, query } } = source;
return coreHTTP.streamRequest(url, {headers, query}).then(src => {
if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
return stream.Readable.from(src);
});
} else if (source.type === "mirror") {
const { debUrl } = packageTarget.restoreFile;
return coreHTTP.streamRequest(debUrl).then(src => {
if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
return stream.Readable.from(src);
});
} else if (source.type === "github") {
const { token } = source, { url } = packageTarget.restoreFile;
return coreHTTP.streamRequest(url, {headers: token ? {"Authorization": "token "+token} : {}}).then(src => {
if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
return stream.Readable.from(src);
});
} else if (source.type === "oracleBucket") {
const { authConfig } = source, { restoreFile: { path } } = packageTarget;
const bucket = await oracleBucket.oracleBucket(authConfig);
return bucket.getFileStream(path).then(src => {
if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
return stream.Readable.from(src);
});
} else if (source.type === "googleDriver") {
const { clientId, clientSecret, clientToken } = source, { restoreFile: { id } } = packageTarget;
const gdrive = await googleDriver.GoogleDriver({authConfig: {clientID: clientId, clientSecret, token: clientToken, redirectURL: "http://localhost", authUrlCallback(){throw new Error("Set up fist")}, tokenCallback() {}}});
return gdrive.getFileStream(id).then(src => {
if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
return stream.Readable.from(src);
});
} else if (source.type === "docker") {
const { image, auth } = source, { ref, path: debPath } = packageTarget.restoreFile;
const registry = new dockerRegistry.v2(image, auth);
return new Promise<stream.Readable>((done, reject) => registry.extractLayer(ref).then(tar => tar.on("error", reject).on("File", entry => entry.path === debPath ? done(entry.stream) : null))).then(src => {
if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
return stream.Readable.from(src);
});
}
throw new Error("Check package type");
}
async addPackage(repositoryID: string, control: dpkg.debianControl, restore: any): Promise<dbStorage> {
if (Boolean(await this.#collection.findOne({
repositoryID,
"controlFile.Package": control.Package,
"controlFile.Version": control.Version,
"controlFile.Architecture": control.Architecture
}))) {
const { Package, Architecture, Version } = control;
throw new Error(format("%s -> %s/%s (%s) are exists in database", repositoryID, Package, Architecture, Version));
}
await this.#collection.insertOne({
repositoryID,
restoreFile: restore,
controlFile: control
});
return {
repositoryID,
restoreFile: restore,
controlFile: control
};
}
async syncRepositorys(callback?: (error?: any, dbStr?: dbStorage) => void) {
const sources = this.getRepositorys().map(({repositoryManeger}) => repositoryManeger.getAllRepositorys()).flat(2);
const toDelete = (await this.#collection.distinct("repositoryID")).filter(key => !sources.find(d => d.repositoryID === key));
if (toDelete.length > 0) await this.#collection.deleteMany({repositoryID: toDelete});
for (const repo of sources) await this.registerSource(repo.repositoryID, repo, callback);
return toDelete;
}
async registerSource(repositoryID: string, target: repositorySource, callback?: (error?: any, dbStr?: dbStorage) => void) {
callback ??= (_void1, _void2) => {};
if (target.type === "http") {
try {
const control = (await dpkg.parsePackage(await coreHTTP.streamRequest(target.url, {headers: target.auth?.header, query: target.auth?.query}))).controlFile;
callback(null, await this.addPackage(repositoryID, control, {}));
} catch (err) {
callback(err, null);
}
} else if (target.type === "oracleBucket") {
const { authConfig, path = [] } = target;
const bucket = await oracleBucket.oracleBucket(authConfig);
try {
if (path.length === 0) path.push(...((await bucket.listFiles()).filter(k => k.name.endsWith(".deb")).map(({name}) => name)));
for (const file of path) {
const control = (await dpkg.parsePackage(await bucket.getFileStream(file))).controlFile;
callback(null, await this.addPackage(repositoryID, control, {path: file}));
}
} catch (err) {
callback(err, null);
}
} else if (target.type === "googleDriver") {
const { clientId, clientSecret, clientToken, gIDs = [] } = target;
const gdrive = await googleDriver.GoogleDriver({authConfig: {clientID: clientId, clientSecret, token: clientToken, redirectURL: "http://localhost", authUrlCallback(){throw new Error("Set up fist")}, tokenCallback() {}}});
if (gIDs.length === 0) gIDs.push(...((await gdrive.listFiles()).filter(rel => rel.name.endsWith(".deb")).map(({id}) => id)));
for (const file of gIDs) {
try {
const control = (await dpkg.parsePackage(await gdrive.getFileStream(file))).controlFile;
callback(null, await this.addPackage(repositoryID, control, {id: file}));
} catch (err) {
callback(err, null);
}
}
} else if (target.type === "github") {
const { owner, repository, token } = target;
const gh = await Github.repositoryManeger(owner, repository, {token});
if (target.subType === "branch") {
const { branch = (await gh.repository.listBranchs()).at(0)?.name ?? "main" } = target;
for (const { path: filePath } of (await gh.git.getTree(branch)).tree.filter(file => file.type === "tree" ? false : (file.size > 10) && file.path.endsWith(".deb"))) {
try {
const rawURL = new URL(path.posix.join(owner, repository, branch, filePath), "https://raw.githubusercontent.com");
const control = (await dpkg.parsePackage(gh.git.getRawFile(branch, filePath))).controlFile;
callback(null, await this.addPackage(repositoryID, control, {url: rawURL.toString()}));
} catch (err) {
callback(err, null);
}
}
} else {
const { tag = [] } = target;
if (!tag.length) tag.push(...((await gh.release.getRelease()).map(d => d.tag_name)));
for (const tagName of tag) {
try {
const assets = (await gh.release.getRelease(tagName)).assets.filter(({name}) => name.endsWith(".deb"));
for (const asset of assets) {
const control = (await dpkg.parsePackage(await coreHTTP.streamRequest(asset.browser_download_url, {headers: token ? {Authorization: `token ${token}`} : {}}))).controlFile;
callback(null, await this.addPackage(repositoryID, control, {url: asset.browser_download_url}));
}
} catch (err) {
callback(err, null);
}
}
}
} else if (target.type === "docker") {
const { image, auth, tags = [] } = target;
const registry = new dockerRegistry.v2(image, auth);
if (tags.length === 0) {
const { sha256, tag } = registry.image;
if (sha256) tags.push(sha256);
else if (tag) tags.push(tag);
else tags.push(...((await registry.getTags()).reverse().slice(0, 6)));
}
for (const tag of tags) {
const manifestManeger = new dockerRegistry.Utils.Manifest(await registry.getManifets(tag), registry);
const addPckage = async () => {
for (const layer of manifestManeger.getLayers()) {
const blob = await registry.extractLayer(layer.digest);
blob.on("error", err => callback(err, null)).on("entry", async (entry, str, next) => {
next();
if (!(entry.name.endsWith(".deb"))) return null;
try {
const control = (await dpkg.parsePackage(stream.Readable.from(str))).controlFile;
callback(null, await this.addPackage(repositoryID, control, {ref: layer.digest, path: entry.path}));
} catch (err) {callback(err, null);}
});
await finished(blob);
}
}
if (manifestManeger.multiArch) {
for (const platform of manifestManeger.platforms) {
await manifestManeger.setPlatform(platform as any);
await addPckage();
}
} else await addPckage();
}
} else if (target.type === "mirror") {
const { config = [] } = target;
const readFile = (path: string, start: number, end: number) => new Promise<Buffer>((done, reject) => {
let buf: Buffer[] = [];
oldFs.createReadStream(path, { start, end }).on("error", reject).on("data", (data: Buffer) => buf.push(data)).on("close", () => {done(Buffer.concat(buf)); buf = null;});
});
for (const aptSrc of config.filter(d => d.type === "packages")) {
const main_url = new URL(aptSrc.src);
const distMain = new URL(path.posix.join(main_url.pathname, "dists", aptSrc.distname), main_url);
const release = apt.parseRelease(await coreHTTP.bufferRequestBody(distMain.toString()+"/InRelease").then(async data => (await openpgp.readCleartextMessage({cleartextMessage: data.toString()})).getText()).catch(() => coreHTTP.bufferRequestBody(distMain.toString()+"/Release").then(data => data.toString())));
for (const Component of release.Components) for (const Arch of release.Architectures.filter(arch => arch !== "all")) {
for (const ext of (["", ".gz", ".xz"])) {
const mainReq = new URL(path.posix.join(distMain.pathname, Component, `binary-${Arch}`, `Packages${ext}`), distMain);
const tmpFile = (path.join(tmpdir(), Buffer.from(mainReq.toString(), "utf8").toString("hex")))+".package";
try {
await streamPromise.finished((await coreHTTP.streamRequest(mainReq)).pipe(decompressStream()).pipe(oldFs.createWriteStream(tmpFile)));
const packagesLocation: {start: number, end: number}[] = [];
let start: number = 0, currentChuck = 0;
await streamPromise.finished(oldFs.createReadStream(tmpFile).on("data", (chunk: Buffer) => {
for (let i = 0; i < chunk.length; i++) if ((chunk[i - 1] === 0x0A) && (chunk[i] === 0x0A)) {
packagesLocation.push({
start,
end: i + currentChuck,
});
start = (i + currentChuck)+1;
}
currentChuck += Buffer.byteLength(chunk, "binary");
}));
for (const { start, end } of packagesLocation) {
const control = dpkg.parseControl(await readFile(tmpFile, start, end));
callback(null, await this.addPackage(repositoryID, control, {
debUrl: (new URL(path.posix.join(main_url.pathname, control.Filename), main_url)).toString()
}));
}
await fs.rm(tmpFile);
break;
} catch (err) {
callback(err, null);
}
}
}
}
}
}
}

13
src/patchExpress.ts Normal file

@ -0,0 +1,13 @@
/**
 * Patch express 4.x's Layer dispatch so rejected promises from async
 * handlers are routed to `next(err)` instead of becoming unhandled
 * rejections that crash the server.
 */
import expressLayer from "express/lib/router/layer.js";
expressLayer.prototype.handle_request = async function handle_request_promised(...args) {
  const fn = this.handle;
  // Error-handling middleware (arity > 3) is not dispatched here; skip to next.
  if (fn.length > 3) return args.at(-1)();
  try {
    // Preserve the Layer `this` binding, as the pre-patch implementation did.
    await fn.call(this, ...args);
  } catch (err) {
    // Forward async failures into Express' error chain via next(err).
    args.at(-1)(err);
  }
}

164
src/server.ts Normal file

@ -0,0 +1,164 @@
import { extendsFS } from "@sirherobrine23/extends";
import express from "express";
import cluster from "node:cluster";
import crypto from "node:crypto";
import { createWriteStream } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import stream from "node:stream";
import { finished } from "node:stream/promises";
import { parse } from "node:url";
import { Connection } from "./config.js";
import "./patchExpress.js";
import { http } from "@sirherobrine23/http";
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";
/**
 * Build a byte-offset transform: drops the first `max` bytes of the piped
 * stream and passes everything after through unchanged.
 *
 * NOTE(review): the original implementation pushed `chunk.subarray(max)` of
 * the first chunk and then immediately ended the stream (for any max >= 0,
 * `max += chunk.length; if (0 <= max) push(null)` is always true), truncating
 * every response to one chunk. The skip-`max`-bytes semantics below is
 * inferred from the guard and the subarray() call — confirm against the
 * caller's Range-header parsing.
 *
 * A negative or NaN `max` means "no offset requested": plain pass-through.
 */
const Range = (max: number) => {
  if (max < 0 || isNaN(max)) return new stream.PassThrough();
  let remaining = max;
  return new stream.Transform({
    transform(chunk, encoding, callback) {
      if (!(Buffer.isBuffer(chunk))) chunk = Buffer.from(chunk, encoding);
      if (remaining > 0) {
        // Still inside the skipped prefix: drop up to `remaining` bytes.
        const drop = Math.min(remaining, chunk.length);
        remaining -= drop;
        chunk = chunk.subarray(drop);
      }
      if (chunk.length > 0) this.push(chunk);
      callback();
    },
  });
}
/**
 * Build the express application that serves the APT repository endpoints.
 *
 * Routes:
 * - GET  /public(_key)             – repository GPG public key (armored or dearmored)
 * - GET  /dists/...                – distribution / component metadata
 * - POST /pool/upload              – open an upload session (returns ID + token)
 * - PUT  /pool/upload/:sessionID   – append data to, or finalize, an upload session
 * - GET  /pool/:packageHASH(.deb)  – package control data, or stream the .deb from its source
 *
 * @param configManeger Active database/config connection for the repository.
 * @returns The configured express application (caller is responsible for listening).
 */
export async function createRoute(configManeger: Connection) {
  const app = express();
  app.disable("x-powered-by").disable("etag");
  app.use(express.json(), (_req, res, next) => {
    res.setHeader("cluster-id", String(cluster.isPrimary ? 1 : cluster.worker.id));
    // Pretty-print every JSON response with a 2-space indent.
    res.json = (body) => res.setHeader("Content-Type", "application/json").send(JSON.stringify(body, null, 2)); return next();
  });
  // Public key
  app.get("/public(_key|)(|.gpg|.dearmor)", async (req, res) => {
    if (!configManeger.repoConfig.publicGPG) return res.status(400).json({ error: "GPG Key is disabled" });
    return res.setHeader("Content-Type", req.path.endsWith(".dearmor") ? "octect/stream" : "text/plain").send(await configManeger.repoConfig.getPulicKey(req.path.endsWith(".dearmor") ? "dearmor" : "armor"));
  });
  // Distribution release metadata.
  app.get("/dists/(:distName)(|/InRelease|/Release(.gpg)?)?", (req, res) => {
    const { distName } = req.params;
    if (!(configManeger.repoConfig.has(distName))) return res.status(404).json({ error: "Ditribuition not exist" });
    return res.json({ distName });
  });
  // Per-component package index, optionally gzip/xz compressed (chosen by URL suffix).
  app.get("/dists/:distName/:componentName/binary-:Arch/Packages(.(gz|xz))?", (req, res) => {
    const { distName, componentName, Arch } = req.params;
    const compression = req.path.endsWith(".gz") ? "gzip" : req.path.endsWith(".xz") ? "lzma" : "none";
    if (!(configManeger.repoConfig.has(distName))) return res.status(404).json({ error: "Ditribuition not exist" });
    const sources = configManeger.repoConfig.get(distName).toArray().filter(info => info.componentName === componentName);
    if (!sources.length) return res.status(404).json({ error: "This component not exists" });
    return res.json({
      distName,
      componentName,
      Arch,
      compression
    });
  });
  // Open an upload session: allocates a temp file plus an (ID, token) pair.
  app.post("/pool/upload", async (req, res) => {
    const { repository, destID } = (req.body || {});
    if (!(configManeger.repoConfig.has(repository))) return res.status(400).json({ error: "Add valid repository name" });
    else if (!(configManeger.repoConfig.get(repository).has(destID))) return res.status(400).json({ error: "Add valid source id" });
    else if (!(configManeger.repoConfig.get(repository).get(destID).enableUpload)) return res.status(401).json({ error: "the source has upload disabled or not supported" });
    const ID = crypto.randomUUID(), token = ([
      crypto.randomBytes(4).toString("hex"),
      crypto.randomBytes(crypto.randomInt(4, 16)).toString("hex"),
      crypto.randomBytes(crypto.randomInt(2, 8)).toString("hex"),
    ]).join("-");
    let filePath: string;
    // Pick a temp-file name that does not collide with an existing file.
    while (true) {
      filePath = path.join(configManeger.repoConfig.tmpFolder, crypto.randomBytes(crypto.randomInt(8, 16)).toString("hex"));
      if (!(await extendsFS.exists(filePath))) break;
    }
    await fs.writeFile(filePath, ""); // Touch file
    await configManeger.uploadCollection.insertOne({
      ID,
      repository,
      destID,
      validAt: Date.now() + 1000 * 60 * 60 * 30,
      token,
      filePath,
    });
    return res.setHeader("Location", path.posix.join(parse(req.url).pathname, ID)).status(201).json({
      token,
      ID
    });
  });
  /**
   * PUT data to file to package to later upload to Dest
   *
   * to add data Set `Content-Range` and `Content-Type: application/octet-stream` to Upload
   * to submit, delete this headers up
   */
  app.put("/pool/upload/:sessionID", async (req, res) => {
    const { sessionID } = req.params;
    const info = await configManeger.uploadCollection.findOne({ ID: sessionID });
    const isPut = (req.headers["content-type"]||"").startsWith("application/octet-stream");
    if (!info) return res.status(400).json({ error: "Require upload ID" });
    // Guard against a missing Authorization header (previously crashed on .slice);
    // NOTE(review): assumes the "token <value>" scheme (5-char prefix) — confirm clients send this.
    else if ((req.headers.authorization || "").slice(5).trim() !== info.token) return res.status(400).json({ error: "invalid token" });
    else if (isPut && !(req.headers["content-range"])) return res.status(400).json({ error: "set Content-Range to put file" });
    if (isPut) {
      if (req.headers["content-range"].startsWith("bytes ")) req.headers["content-range"] = req.headers["content-range"].slice(5).trim();
      if (req.headers["content-range"].trim() === "*") req.headers["content-range"] = "0";
      // Default _end to "" so a range without "-" (e.g. the "*" case rewritten above)
      // no longer throws TypeError on _end.split; an empty end falls through to Range(-1) (no limit).
      const [start, _end = ""] = req.headers["content-range"].split("-"), [end] = _end.split("/");
      if (Number(end) < Number(start)) return res.status(400).json({ error: "Require file more that " + start })
      await finished(req.pipe(Range(Number(end || -1))).pipe(createWriteStream(info.filePath, { start: Number(start) })));
      // Renew the session expiry; previously the Date.now() base was missing,
      // storing a 1970-epoch offset that instantly expired the session (cf. insertOne above).
      await configManeger.uploadCollection.findOneAndUpdate({ ID: sessionID }, { $set: { validAt: Date.now() + 1000 * 60 * 60 * 30 } })
      return res.status(202).end();
    }
    // No octet-stream body: finalize the session and push the file to its destination.
    const upload = await configManeger.repoConfig.get(info.repository).uploadFile(info.destID, info.filePath);
    await fs.rm(info.filePath, { force: true });
    await configManeger.uploadCollection.findOneAndDelete({ ID: info.ID });
    return res.setHeader("Location", `/pool/${upload.controlFile.MD5sum}.deb`).status(201).end();
  });
  // Package control data (JSON) or the .deb itself, streamed back from its original source.
  app.get("/pool/(:packageHASH)(|.deb)?", async (req, res, next) => {
    const download = req.path.endsWith(".deb"), { packageHASH } = req.params;
    const info = await configManeger.packageCollection.findOne({ $or: [{ "control.MD5sum": packageHASH }, { "control.SHA1": packageHASH }, { "control.SHA256": packageHASH }, { "control.SHA512": packageHASH }] });
    if (!info) return res.status(404).json({ error: "Package not registred" });
    else if (!download) return res.json(info.control);
    const origem = info.repositorys.find(info => configManeger.repoConfig.has(info.repository) && configManeger.repoConfig.get(info.repository).has(info.origim));
    if (!origem) return res.status(400).json({ error: "Cannot get origem source" });
    const src = configManeger.repoConfig.get(origem.repository).get(origem.origim);
    if (src.type === "http") {
      return http.streamRequest(src.url, {
        query: src.query,
        headers: src.header
      }).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "github") {
      const download: { url: string } = info.restoreFile;
      return http.streamRequest(download.url, {
        headers: src.token ? { Authorization: `token ${src.token}` } : {},
        query: { token: src.token },
      }).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "mirror") {
      const download: { url: string } = info.restoreFile;
      return http.streamRequest(download.url).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "oracleBucket") {
      const download: { filePath: string } = info.restoreFile;
      const oci = await oracleBucket.oracleBucket(src.authConfig);
      return oci.getFileStream(download.filePath).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "googleDriver") {
      const download: { id: string } = info.restoreFile;
      const gdrive = await googleDriver.GoogleDriver({ oauth: await googleDriver.createAuth({ clientID: src.clientId, clientSecret: src.clientSecret, token: src.clientToken, authUrlCallback: () => { throw new Error("Auth disabled"); }, tokenCallback: () => { }, redirectURL: null }) });
      return gdrive.getFileStream(download.id).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "docker") {
      // Docker-backed restore is not implemented; rejection is forwarded by the patched Layer.
      throw new Error("CLOSE");
    } else return res.status(404).json({ error: "Source origem is unknown" });
  });
  return app;
}

@ -13,7 +13,6 @@
"noFallthroughCasesInSwitch": true,
"skipLibCheck": true,
"allowJs": true,
"composite": true,
"lib": [
"ESNext",
"ES7"