WIP: Migrate dpkg package #36
package.json
@@ -46,25 +46,25 @@
   "devDependencies": {
     "@types/express": "^4.17.17",
     "@types/inquirer": "^9.0.3",
-    "@types/node": "^18.16.3",
+    "@types/node": "^20.3.2",
     "@types/yargs": "^17.0.24",
     "ts-node": "^10.9.1",
-    "typescript": "^5.0.4"
+    "typescript": "^5.1.6"
   },
   "dependencies": {
-    "@sirherobrine23/cloud": "^3.6.8",
-    "@sirherobrine23/decompress": "^3.6.8",
-    "@sirherobrine23/docker-registry": "^3.6.8",
-    "@sirherobrine23/dpkg": "^3.6.8",
-    "@sirherobrine23/extends": "^3.6.8",
-    "@sirherobrine23/http": "^3.6.8",
+    "@sirherobrine23/cloud": "^3.6.12",
+    "@sirherobrine23/decompress": "^3.6.12",
+    "@sirherobrine23/docker-registry": "^3.6.12",
+    "@sirherobrine23/dpkg": "^3.6.12",
+    "@sirherobrine23/extends": "^3.6.12",
+    "@sirherobrine23/http": "^3.6.12",
     "express": "^4.18.2",
     "express-rate-limit": "^6.7.0",
-    "inquirer": "^9.2.1",
+    "inquirer": "^9.2.7",
     "inquirer-file-tree-selection-prompt": "^2.0.5",
-    "mongodb": "^5.4.0",
-    "openpgp": "^5.8.0",
-    "yaml": "^2.2.2",
+    "mongodb": "^5.6.0",
+    "openpgp": "^5.9.0",
+    "yaml": "^2.3.1",
     "yargs": "^17.7.2"
   }
 }

src/config.ts
@@ -1,16 +1,18 @@
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";
import { extendsFS } from "@sirherobrine23/extends";
import { Github } from "@sirherobrine23/http";
import { apt } from "@sirherobrine23/dpkg";
import oldFs, { promises as fs } from "node:fs";
import dockerRegistry from "@sirherobrine23/docker-registry";
import openpgp from "openpgp";
import { apt, dpkg } from "@sirherobrine23/dpkg";
import { extendsCrypto } from "@sirherobrine23/extends";
import { Github } from "@sirherobrine23/http";
import { Collection, Db, MongoClient } from "mongodb";
import crypto from "node:crypto";
import stream from "node:stream";
import oldFs, { createReadStream } from "node:fs";
import { tmpdir } from "node:os";
import path from "node:path";
import { finished } from "node:stream/promises";
import openpgp from "openpgp";
import yaml from "yaml";

export type repositorySource = {
export type repositorySouce = {
  /**
   * Dist component
   * @default main
@@ -23,12 +25,10 @@ export type repositorySource = {
  enableUpload?: boolean;
} & ({
  type: "http",
  url: string | URL,
  enableUpload?: false;
  url: string,
  auth?: {
    header?: { [key: string]: string },
    query?: { [key: string]: string }
  }
} | {
  type: "mirror",
  enableUpload?: false;
@@ -55,7 +55,7 @@ export type repositorySource = {
} | {
  subType: "branch",
  enableUpload?: false;
  branch: string,
  branch: string[],
}) | {
  type: "googleDriver",

@@ -74,6 +74,9 @@ export type repositorySource = {
   */
  clientToken: googleDriver.googleCredential,

  /** Folder id to add files upload */
  uploadFolderID?: string;

  /**
   * Files or Folders ID's
   */
@@ -86,6 +89,9 @@ export type repositorySource = {
   */
  authConfig: oracleBucket.oracleOptions,

  /** Folder to upload files if enabled */
  uploadFolderPath?: string;

  /**
   * Files or Folders path
   */
@@ -97,419 +103,174 @@ export type repositorySource = {
  tags?: string[]
});

export interface repositorySources {
export type SourceJson = {
  Description?: string;
  Codename?: string;
  Suite?: string;
  Origin?: string;
  Label?: string;
  sources: {
    [key: string]: repositorySource;
  repositorys?: {
    [src: string]: repositorySouce
  }
  };
}

export class Repository extends Map<string, repositorySource> {
  #Description?: string;
  setDescription(value: string) {this.#Description = value; return this;}
  getDescription() {return this.#Description}

  #Codename?: string;
  setCodename(value: string) {this.#Codename = value; return this;}
  getCodename() {return this.#Codename}

  #Suite?: string;
  setSuite(value: string) {this.#Suite = value; return this;}
  getSuite() {return this.#Suite}

  #Origin?: string;
  setOrigin(value: string) {this.#Origin = value; return this;}
  getOrigin() {return this.#Origin}

  #Label?: string;
  setLabel(value: string) {this.#Label = value; return this;}
  getLabel() {return this.#Label}

  constructor(src?: repositorySources) {
export class Source extends Map<string, repositorySouce> {
  constructor(src: SourceJson = {}) {
    super();
    if (src) {
      if (Array.isArray(src["source"])) {
        console.warn("Migrating old repository to new Version");
        const aptConfig = src["aptConfig"] || {};
        this.#Description = aptConfig.Description;
        this.#Codename = aptConfig.Codename;
        this.#Origin = aptConfig.Origin;
        this.#Suite = aptConfig.Suite;
        this.#Label = aptConfig.Label;
        const old: any[] = src["source"];
        old.forEach(repo => {try {repo.type = repo.type.replace(/_([A-Z])/, (_sub, key: string) => key.toUpperCase()) as any; this.set(repo.id, repo as any)} catch {}});
        return;
      }
      this.#Description = src.Description;
      this.#Codename = src.Codename;
      this.#Origin = src.Origin;
      this.#Suite = src.Suite;
      this.#Label = src.Label;
      src.sources ||= {};
      for (const key in src.sources) {
        try {this.set(key, src.sources[key]);} catch {}
    if (!src) return;
    const {
      Codename,
      Description,
      Label,
      Origin,
      Suite,
      repositorys = {}
    } = src;

    this.Description = Description;
    this.Codename = Codename;
    this.Suite = Suite;
    this.Origin = Origin;
    this.Label = Label;
    Object.keys(repositorys).forEach(key => this.set(key, repositorys[key]));
  }

  Description?: string;
  Codename?: string;
  Suite?: string;
  Origin?: string;
  Label?: string;

  toJSON() {
    return Array.from(this.keys()).reduce<SourceJson>((acc, key) => {
      acc.repositorys[key] = this.get(key);
      return acc;
    }, {
      Description: this.Description,
      Codename: this.Codename,
      Suite: this.Suite,
      Label: this.Label,
      Origin: this.Origin,
      repositorys: {}
    });
  }

  toArray(): (repositorySouce & { id: string })[] {
    return Array.from(this.keys()).map(id => ({ id, ...(this.get(id)) }));
  }

  /**
   * Add new repository source
   * Add new source origin to Repository and check if valid config, else throw Error.
   *
   * @param key - Repository ID
   * @param repo - Source config
   * @returns
   * @param srcID - optional ID to source
   * @param value - Repository source to Repository
   */
  set(key: string, repo: repositorySource) {
    if (this.has(key)) throw new Error("ID are exists");
    if (repo["id"]) delete repo["id"];
    if (repo.type === "http") {
      if (!repo.url) throw new Error("Required URL to add this source");
      else {
        if (!(Object.keys(repo.auth?.header||{}).length) && repo.auth?.header) delete repo.auth.header;
        if (!(Object.keys(repo.auth?.query||{}).length) && repo.auth?.query) delete repo.auth.query;
      }
      if (!(Object.keys(repo.auth||{}).length) && repo.auth) delete repo.auth;
      repo.enableUpload = false;
    } else if (repo.type === "mirror") {
      if (!repo.config) throw new Error("Require Mirror sources");
      else if (!((repo.config = repo.config.filter(at => at.type === "packages" && at.distname?.trim?.() && at.src?.trim?.())).length)) throw new Error("To mirror the repository you only need a source");
      repo.enableUpload = false;
    } else if (repo.type === "github") {
      if (!(repo.owner && repo.repository)) throw new Error("github Sources require owner and repository");
      if (!repo.token) delete repo.token;
      if (repo.subType === "release") {
        if (!(repo.tag?.length)) delete repo.tag;
      } else if (repo.subType === "branch") {
        if (!(repo.branch)) delete repo.branch;
        repo.enableUpload = false;
      } else throw new Error("invalid github source");
    } else if (repo.type === "googleDriver") {
      if (!(repo.clientId && repo.clientSecret && (typeof repo.clientToken?.access_token === "string" && repo.clientToken.access_token.trim().length > 0))) throw new Error("Invalid settings to Google oAuth");
      if (!(repo.gIDs?.length)) delete repo.gIDs;
    } else if (repo.type === "oracleBucket") {
      if (!repo.authConfig) throw new Error("Required auth config to Oracle bucket");
      if (repo.authConfig.auth) {
        if (Array.isArray(repo.authConfig.auth)) {
          if (!(repo.authConfig.auth.length)) throw new Error("Require auth to Oracle Cloud");
          const backup = repo.authConfig.auth.slice(0, 2);
  set(srcID: string | undefined, value: repositorySouce) {
    if (!value) throw new Error("Require value");
    else if (!(typeof value === "object" && !Array.isArray(value))) throw new Error("Require Object");
    else if (typeof value === "string" && this.has(srcID)) throw new Error("Source ID are add");
    value.componentName ||= "main";
    value.enableUpload ??= false;
    srcID ||= value.type + "_" + ([crypto.randomBytes(6).toString("hex"), crypto.randomBytes(crypto.randomInt(4, 16)).toString("hex"), crypto.randomUUID()]).join("-");

    if (value.type === "http") {
      if (!value.url) throw new Error("Require debian package file");
      else if (!(value.url instanceof URL || typeof value.url === "string" && value.url.startsWith("http"))) throw new Error("Invalid URL");
      value.enableUpload = false;

      // Test string to is valid URL
      let protocol: string;
      if (typeof value.url === "string") protocol = (new URL(value.url)).protocol;
      else protocol = value.url.protocol;
      if (!(protocol === "http:" || protocol === "https:")) throw new Error("Invalid URL, require HTTP protocol");

      value.header ||= {};
      value.query ||= {};
    } else if (value.type === "github") {
      if (!(value.owner && value.repository)) throw new Error("Require valid repository and owner");
      else if (!(typeof value.owner === "string" && value.owner.length > 1)) throw new Error("Require valid owner username");
      else if (!(typeof value.repository === "string" && value.repository.length > 1)) throw new Error("Require valid repository name");
      value.token ||= Github.githubToken;

      if (value.subType === "release") {
        value.tag ||= [];
      } else if (value.subType === "branch") {
        value.enableUpload = false;
        value.branch ||= [];
        if (!value.branch.length) throw new Error("Require at one Branch");
      } else throw new Error("Invalid Github subtype");
    } else if (value.type === "googleDriver") {
      if (!(value.clientId && value.clientSecret && value.clientToken)) throw new Error("Require Client ID, Secret and Token auth");
      else if (!(typeof value.clientId === "string" && value.clientId.length > 5)) throw new Error("Require valid clientID");
      else if (!(typeof value.clientSecret === "string" && value.clientSecret.length > 5)) throw new Error("Require valid clientSecret");
      else if (!(typeof value.clientToken === "object" && typeof value.clientToken.access_token === "string")) throw new Error("Require valid token");
      value.gIDs ||= [];
    } else if (value.type === "oracleBucket") {
      if (!value.authConfig.region) throw new Error("Require Bucket region");
      if (value.authConfig.auth) {
        if (Array.isArray(value.authConfig.auth)) {
          if (!(value.authConfig.auth.length)) throw new Error("Require auth to Oracle Cloud");
          const backup = value.authConfig.auth.slice(0, 2);
          if (!(oldFs.existsSync(path.resolve(process.cwd(), backup.at(0))))) throw new Error("Invalid Oracle auth path, Path not exists");
          backup[0] = path.resolve(process.cwd(), backup.at(0));
          if (typeof backup.at(1) === "string") {
            if (!(backup[1] = backup[1].trim())) delete backup[1];
          } else delete backup[1];
          repo.authConfig.auth = backup.filter(Boolean);
          value.authConfig.auth = backup.filter(Boolean);
        } else {
          const { tenancy, user, fingerprint, privateKey, passphase } = repo.authConfig.auth;
          const { tenancy, user, fingerprint, privateKey, passphase } = value.authConfig.auth;
          if (!(tenancy && user && fingerprint && privateKey)) throw new Error("Invalid auth to Oracle Cloud");
          if (!passphase) delete repo.authConfig.auth.passphase;
          if (!passphase) delete value.authConfig.auth.passphase;
        }
      }
      if (!(repo.path?.length)) delete repo.path;
    } else if (repo.type === "docker") {
      if (!repo.image) throw new Error("Require docker image");
      if (repo.auth) if (!(repo.auth.username && repo.auth.password)) throw new Error("Required valid auth to Docker image");
      if (!(repo.tags?.length)) delete repo.tags;
      value.path ||= [];
    } else if (value.type === "mirror") {
      value.enableUpload = false;
      if (!value.config) throw new Error("Require Mirror sources");
      else if (!((value.config = value.config.filter(at => at.type === "packages" && at.distname?.trim?.() && at.src?.trim?.())).length)) throw new Error("To mirror the repository you only need a source");
    } else if (value.type === "docker") {
      if (!value.image) throw new Error("Require docker image");
      if (value.auth) if (!(value.auth.username && value.auth.password)) throw new Error("Required valid auth to Docker image");
      value.tags ||= [];
    } else throw new Error("Invalid source type");
    repo.componentName ||= "main";
    repo.enableUpload ??= false;
    super.set(key, repo);
    super.set(srcID, value);
    return this;
  }

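Note: the reworked set() accepts an optional source ID and derives one from the source type plus random material when none is given. A quick sketch of that path (the mirror line is illustrative; the generated ID shape follows the code above but is not a stable format):

const src = new Source();
src.set(undefined, { type: "mirror", config: apt.parseSourceList("deb http://example.com example main") });
console.log(src.toArray().at(0).id); // e.g. "mirror_<hex>-<hex>-<uuid>"
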
  /**
   * Get repository source
   *
   * @param repoID - Repository ID
   * @returns repository source
   */
  get(repoID: string) {
    if (!(this.has(repoID))) throw new Error("Repository not exists");
    return super.get(repoID);
  }
  async uploadFile(srcID: string, filePath: string) {
    if (!(this.has(srcID))) throw new Error("ID not exists");
    const info = this.get(srcID);
    if (!(info.enableUpload)) throw new Error("Cannot upload package");
    const { controlFile } = await dpkg.parsePackage(createReadStream(filePath));
    const debInfo = await extendsCrypto.createHashAsync(createReadStream(filePath)), fileName = `${controlFile.Package}_${controlFile.Architecture}_${controlFile.Version}.deb`;

  /** Get all repository sources with repository ID */
  getAllRepositorys(): ({repositoryID: string} & repositorySource)[] {
    return Array.from(this.keys()).map(key => ({repositoryID: key, ...(this.get(key))}));
  }
    if (info.type === "github") await finished(createReadStream(filePath).pipe((await (await Github.repositoryManeger(info.owner, info.repository, { token: info.token })).release.manegerRelease(controlFile.Version)).uploadAsset(fileName, debInfo.byteLength)), { error: true });
    else if (info.type === "oracleBucket") await finished(createReadStream(filePath).pipe((await oracleBucket.oracleBucket(info.authConfig)).uploadFile(path.posix.join(info.uploadFolderPath || "/", fileName))));
    else if (info.type === "googleDriver") {
      const gdrive = await googleDriver.GoogleDriver({oauth: await googleDriver.createAuth({ clientID: info.clientId, clientSecret: info.clientSecret, token: info.clientToken, authUrlCallback: () => { throw new Error("Auth disabled"); }, tokenCallback: () => { }, redirectURL: null })});
      await finished(gdrive.uploadFile(fileName, info.uploadFolderID));
    } else if (info.type === "docker") {
      const oci = new dockerRegistry.v2(info.image, info.auth);
      const img = await oci.createImage(dockerRegistry.debianArchToDockerPlatform(controlFile.Architecture));
      await finished(createReadStream(filePath).pipe(img.createBlob("gzip").addEntry({ name: fileName, size: debInfo.byteLength })));
      info.tags.push((await img.finalize()).digest);
      super.set(srcID, info);
    } else throw new Error("Not implemented upload");

  /**
   * Upload debian file to repository source if avaible
   *
   * @param repositoryID - Repository ID
   * @returns
   */
  async uploadFile(repositoryID: string) {
    const repo = this.get(repositoryID);
    if (!repo.enableUpload) throw new Error("Repository not allow or not support to upload files!");
    if (repo.type === "github") {
      if (!repo.token) throw new Error("Cannot create upload file to Github Release, required Token to upload files!");
      const { owner, repository, token } = repo;
      const gh = await Github.repositoryManeger(owner, repository, {token});
      return {
        async githubUpload(filename: string, fileSize: number, tagName?: string): Promise<stream.Writable> {
          if (!tagName) tagName = (await gh.release.getRelease("__latest__").catch(async () => (await gh.release.getRelease()).at(0)))?.tag_name||"v1";
          return (await gh.release.manegerRelease(tagName)).uploadAsset(filename, fileSize);
        }
      };
    } else if (repo.type === "googleDriver") {
      const { clientId: clientID, clientSecret, clientToken } = repo;
      const gdrive = await googleDriver.GoogleDriver({
        authConfig: {
          clientID,
          clientSecret,
          token: clientToken,
          redirectURL: "http://localhost",
          authUrlCallback(){throw new Error("Set up fist")},
          tokenCallback() {},
        }
      });
      return {
        gdriveUpload: async (filename: string, folderId?: string) => gdrive.uploadFile(filename, folderId),
      };
    } else if (repo.type === "oracleBucket") {
      const oci = await oracleBucket.oracleBucket(repo.authConfig);
      return {ociUpload: oci.uploadFile};
    } else if (repo.type === "docker") {
      return {
        dockerUpload: async (platform: dockerRegistry.dockerPlatform) => {
          const dockerRepo = new dockerRegistry.v2(repo.image, repo.auth);
          const img = await dockerRepo.createImage(platform);
          const blob = img.createBlob("gzip");
          return {
            ...blob,
            annotations: img.annotations,
            async finalize(tagName?: string) {
              await blob.finalize();
              const dockerRepo = await img.finalize(tagName);
              repo.tags ||= [];
              repo.tags.push(dockerRepo.digest);
              return dockerRepo;
            }
          };
        },
        dockerUploadV2() {
          return new dockerRegistry.v2(repo.image, repo.auth);
        }
      };
    }

    throw new Error("Not implemented");
  }

  toJSON(): repositorySources {
    return {
      Description: this.#Description,
      Codename: this.#Codename,
      Origin: this.#Origin,
      Label: this.#Label,
      sources: Array.from(this.keys()).reduce<{[key: string]: repositorySource}>((acc, key) => {acc[key] = this.get(key); return acc;}, {}),
    };
    return { controlFile, debInfo };
  }
}

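Note: the new Source.uploadFile replaces the old per-type upload helpers; it parses the .deb control file to derive the asset name, hashes the stream, and pushes to whichever backend the source ID points at. A minimal usage sketch under the new API (source ID, token, and file path below are illustrative, not from this diff):

const src = new Source();
src.set("gh-release", {
  type: "github", subType: "release", enableUpload: true,
  owner: "Sirherobrine23", repository: "apt-stream",
  token: "ghp_exampleToken", tag: [], componentName: "main" // illustrative token
});
const { controlFile, debInfo } = await src.uploadFile("gh-release", "./example_1.0.0_amd64.deb");
console.log("Uploaded %s %s", controlFile.Package, controlFile.Version);
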
interface serverConfig {
  portListen: number;
  clusterForks: number;
  dataStorage?: string;
  release?: {
    gzip?: boolean;
    xz?: boolean;
  };
  database?: {
    url: string;
    databaseName?: string;
  };
  gpgSign?: {
export type ConfigJSON = {
  mongoURL?: string;
  gpg?: {
    gpgPassphrase?: string;
    privateKey: {
      keyContent: string;
      filePath?: string;
    privateKey: string;
    publicKey: string;
    };
    publicKey: {
      keyContent: string;
      filePath?: string;
    };
  };
}

export interface configJSON extends serverConfig {
  repository: {[repoName: string]: repositorySources};
}

export class aptStreamConfig {
  #internalServerConfig: serverConfig = { portListen: 0, clusterForks: 0 };
  #internalRepository: {[repositoryName: string]: Repository} = {};
  toJSON(): configJSON {
    const config: configJSON = Object(this.#internalServerConfig);
    if (config.dataStorage) config.dataStorage = path.relative(process.cwd(), config.dataStorage);
    config.repository = {};
    Object.keys(this.#internalRepository).forEach(repoName => config.repository[repoName] = this.#internalRepository[repoName].toJSON());
    return config;
  }

  toString(encode?: BufferEncoding, type?: "json"|"yaml") {
    encode ||= "utf8";
    type ||= "json";
    return ((["hex", "base64", "base64url"]).includes(encode) ? (encode+":") : "")+(Buffer.from((type === "yaml" ? yaml : JSON).stringify(this.toJSON(), null, (["hex", "base64", "base64url"]).includes(encode) ? undefined : 2), "utf8").toString(encode || "utf8"));
  }

  #configPath?: string;
  async saveConfig(configPath?: string, type?: "json"|"yaml") {
    if (!(configPath||this.#configPath)) throw new Error("Current config only memory");
    if (this.#configPath) type ||= path.extname(this.#configPath) === ".json" ? "json" : "yaml";
    else if (configPath) type ||= path.extname(configPath) === ".json" ? "json" : "yaml";
    await fs.writeFile((configPath||this.#configPath), this.toString("utf8", type));
  }

  constructor(config?: string|configJSON|aptStreamConfig) {
    if (config) {
      let nodeConfig: configJSON;
      if (config instanceof aptStreamConfig) {
        this.#configPath = config.#configPath;
        config = config.toJSON();
      }
      if (typeof config === "string") {
        let indexofEncoding: number;
        if (config.startsWith("env:")) config = process.env[config.slice(4)];
        if (path.isAbsolute(path.resolve(process.cwd(), config))) {
          if (oldFs.existsSync(path.resolve(process.cwd(), config))) config = oldFs.readFileSync((this.#configPath = path.resolve(process.cwd(), config)), "utf8")
          else {
            this.#configPath = path.resolve(process.cwd(), config);
            config = undefined;
          }
        } else if ((["hex:", "base64:", "base64url:"]).find(rel => config.toString().startsWith(rel))) config = Buffer.from(config.slice(indexofEncoding+1).trim(), config.slice(0, indexofEncoding) as BufferEncoding).toString("utf8");
        else config = undefined;
        if (!!config) {
          try {
            nodeConfig = JSON.parse(config as string);
          } catch {
            try {
              nodeConfig = yaml.parse(config as string);
            } catch {
              throw new Error("Invalid config, not is YAML or JSON");
            }
          }
        }
      } else if (typeof config === "object") nodeConfig = config;

      // Add sources
      nodeConfig ||= {clusterForks: 0, portListen: 0, repository: {}};
      nodeConfig.repository ||= {};
      Object.keys(nodeConfig.repository).forEach(keyName => this.#internalRepository[keyName] = new Repository(nodeConfig.repository[keyName]));

      // Add server config
      delete nodeConfig.repository;
      this.#internalServerConfig = {clusterForks: Number(nodeConfig.clusterForks || 0), portListen: Number(nodeConfig.portListen || 0)};
      if (nodeConfig.dataStorage) this.#internalServerConfig.dataStorage = path.resolve(process.cwd(), nodeConfig.dataStorage);
      this.#internalServerConfig.release = {};
      this.#internalServerConfig.release.gzip = !!(nodeConfig.release?.gzip ?? true);
      this.#internalServerConfig.release.xz = !!(nodeConfig.release?.xz ?? true);
      if (nodeConfig.database?.url) this.#internalServerConfig.database = {
        url: nodeConfig.database.url,
        databaseName: nodeConfig.database.databaseName || "aptStream"
      };
      if (nodeConfig.gpgSign?.privateKey && nodeConfig.gpgSign?.publicKey) {
        const { gpgPassphrase, privateKey, publicKey } = nodeConfig.gpgSign;
        if (privateKey.filePath && publicKey.filePath) {
          privateKey.keyContent = oldFs.readFileSync(privateKey.filePath, "utf8");
          publicKey.keyContent = oldFs.readFileSync(publicKey.filePath, "utf8");
        }
        this.#internalServerConfig.gpgSign = {
          gpgPassphrase: String(gpgPassphrase||""),
          privateKey: {
            keyContent: privateKey.keyContent,
            filePath: privateKey.filePath
          },
          publicKey: {
            keyContent: publicKey.keyContent,
            filePath: publicKey.filePath
          }
        };
      }
      if (!this.#internalServerConfig.gpgSign?.gpgPassphrase && typeof this.#internalServerConfig.gpgSign?.gpgPassphrase === "string") delete this.#internalServerConfig.gpgSign.gpgPassphrase;
    }
  }

  setCompressRelease(target: keyof configJSON["release"], value: boolean) {
    this.#internalServerConfig.release[target] = !!value;
    return this;
  }

  getCompressRelease(target: keyof configJSON["release"]) {
    return !!(this.#internalServerConfig.release?.[target]);
  }

  databaseAvaible() {return !!this.#internalServerConfig.database;}
  getDatabase() {
    if (!this.databaseAvaible()) throw new Error("No Database set up");
    return this.#internalServerConfig.database;
  }

  setDatabse(url: string, databaseName?: string) {
    this.#internalServerConfig.database = {
      url,
      databaseName
    };
    return this;
  }

  getClusterForks() {return Number(this.#internalServerConfig.clusterForks || 0);}
  setClusterForks(value: number) {
    if (value > 0 && value < 256) this.#internalServerConfig.clusterForks = value;
    else this.#internalServerConfig.clusterForks = 0;
    return this;
  }

  setDataStorage(folderPath: string) {
    if (path.isAbsolute(folderPath)) this.#internalServerConfig.dataStorage = folderPath; else throw new Error("Require absolute path");
    return this;
  }
  async getDataStorage() {
    if (!this.#internalServerConfig.dataStorage) return undefined;
    if (!(await extendsFS.exists(this.#internalServerConfig.dataStorage))) await fs.mkdir(this.#internalServerConfig.dataStorage, {recursive: true});
    return this.#internalServerConfig.dataStorage;
  }

  getPortListen() {return Number(this.#internalServerConfig.portListen || 0);}
  setPortListen(port: number) {
    if (port >= 0 && port <= ((2**16) - 1)) this.#internalServerConfig.portListen = port;
    else throw new Error(`Invalid port range (0 - ${(2**16) - 1})`);
    return this;
  }

  setPGPKey(gpgSign: configJSON["gpgSign"]) {
    const { gpgPassphrase, privateKey, publicKey } = gpgSign;
    if (privateKey.filePath && publicKey.filePath) {
      privateKey.keyContent = oldFs.readFileSync(privateKey.filePath, "utf8");
      publicKey.keyContent = oldFs.readFileSync(publicKey.filePath, "utf8");
    }
    this.#internalServerConfig.gpgSign = {
      gpgPassphrase: String(gpgPassphrase||""),
      privateKey: {
        keyContent: privateKey.keyContent,
        filePath: privateKey.filePath
      },
      publicKey: {
        keyContent: publicKey.keyContent,
        filePath: publicKey.filePath
      }
  repositorys: { [repoName: string]: SourceJson };
    };

    return this;
  }

  /**
   * Generate Private and Public PGP/GPG Keys to signing repository (InRelease and Release.gpg)
   *
   * @param options - Gpg Options
   * @returns
   */
  async generateGpgKeys(options?: {passphrase?: string, email?: string, name?: string}) {
    const { passphrase, email, name } = options || {};
export async function generateGPG({ passphrase, email, name }: { passphrase?: string, email?: string, name?: string } = {}) {
  const { privateKey, publicKey } = await openpgp.generateKey({
    rsaBits: 4094,
    type: "rsa",
@@ -522,107 +283,116 @@ export class aptStreamConfig {
      }
    ]
  });
    this.#internalServerConfig.gpgSign = {
      gpgPassphrase: passphrase,
      privateKey: {keyContent: privateKey},
      publicKey: {keyContent: publicKey}

  return {
    privateKey,
    publicKey,
    passphrase
  };
    if (this.#internalServerConfig.dataStorage) {
      this.#internalServerConfig.gpgSign.privateKey.filePath = path.join(this.#internalServerConfig.dataStorage, "privateKey.gpg");
      this.#internalServerConfig.gpgSign.publicKey.filePath = path.join(this.#internalServerConfig.dataStorage, "publicKey.gpg");
      await fs.writeFile(this.#internalServerConfig.gpgSign.privateKey.filePath, this.#internalServerConfig.gpgSign.privateKey.keyContent);
      await fs.writeFile(this.#internalServerConfig.gpgSign.publicKey.filePath, this.#internalServerConfig.gpgSign.publicKey.keyContent);
    }

    return this.#internalServerConfig.gpgSign;
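Note: generateGPG is now a free function rather than a config method, so key generation no longer touches server state. A minimal sketch of a call (identity values are illustrative; per the return statement above, the passphrase is echoed back so it can be stored next to the keys):

import { generateGPG } from "./config.js"; // illustrative import path

const { privateKey, publicKey, passphrase } = await generateGPG({
  name: "apt-stream",        // illustrative identity
  email: "apt@example.com",  // illustrative
  passphrase: "changeme"     // optional
});
// Assign the armored keys to Config.privateGPG / Config.publicGPG to enable InRelease signing.
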
export class Config extends Map<string, Source> {
  /** Mongo Server URL to connect */
  public mongoConnection: URL = new URL("mongodb://127.0.0.1/aptStream");
  public tmpFolder = tmpdir();

  public gpgPassphrase?: string;
  public privateGPG?: string;
  public publicGPG?: string;

  constructor(src?: ConfigJSON | string) {
    super();
    if (!src) return;

    if (typeof src === "string") {
      const srcc: string = src;
      try {
        src = yaml.parse(srcc);
      } catch {
        src = JSON.parse(srcc);
      }
    }

  getPGPKey() {
    if (!this.#internalServerConfig.gpgSign) throw new Error("PGP/GPG Key not set");
    return this.#internalServerConfig.gpgSign;
    const {
      mongoURL,
      gpg,
      repositorys = {},
    } = src as ConfigJSON;
    if (mongoURL) this.mongoConnection = new URL(mongoURL);
    if (gpg) {
      this.gpgPassphrase = gpg.gpgPassphrase;
      this.privateGPG = gpg.privateKey;
      this.publicGPG = gpg.publicKey;
    }
    Object.keys(repositorys).forEach(key => this.set(key, new Source(repositorys[key])));
  }

  async getPublicKey(type: "dearmor"|"armor"): Promise<string|Buffer> {
    const { publicKey } = this.getPGPKey();
  /**
   * Get YAML config to easy edit
   * @returns - yaml Config
   */
  toString() { return yaml.stringify(this.toJSON()); }

  toJSON() {
    return Object.keys(this.keys()).reduce<ConfigJSON>((acc, key) => {
      acc.repositorys[key] = this.get(key).toJSON();
      return acc;
    }, {
      mongoURL: this.mongoConnection.toString(),
      ...(!(this.privateGPG && this.publicGPG) ? {} : {
        gpg: {
          gpgPassphrase: this.gpgPassphrase,
          privateKey: this.privateGPG,
          publicKey: this.publicGPG,
        }
      }),
      repositorys: {},
    });
  }

  async getPulicKey(fileType: "dearmor" | "armor" = "armor") {
    // same to gpg --dearmor
    if (type === "dearmor") return Buffer.from((await openpgp.unarmor(publicKey.keyContent)).data as any);
    return (await openpgp.readKey({ armoredKey: publicKey.keyContent })).armor();
    if (fileType === "dearmor") return Buffer.from((await openpgp.unarmor(this.publicGPG)).data as any);
    return (await openpgp.readKey({ armoredKey: this.publicGPG })).armor();
  }
}

  /**
   * Create new source to repository.
   *
   * @param repositoryName - Repository name
   * @returns Repository class
   */
  createRepository(repositoryName: string) {
    if (this.#internalRepository[repositoryName]) throw new Error("Repository name are exists");
    this.#internalRepository[repositoryName] = new Repository();
    this.#internalRepository[repositoryName].setCodename(repositoryName).setOrigin(repositoryName);
    return this.#internalRepository[repositoryName];
type packageCollection = {
  repositorys: {repository: string, origim: string}[];
  restoreFile: any;
  control: dpkg.debianControl;
};

export type uploadInfo = {
  ID: string;
  token: string;
  validAt: number;
  filePath: string;
  repository: string;
  destID: string;
};

export class Connection {
  constructor(public repoConfig: Config, public client: MongoClient) {
    this.database = client.db(repoConfig.mongoConnection.pathname.slice(1) || "aptStream");
    this.packageCollection = this.database.collection<packageCollection>("packages");
    this.uploadCollection = this.database.collection<uploadInfo>("uploads");
  }

  /**
   *
   * @param repositoryName - Repository name, if not exists create this.
   * @param pkgSource - Packages source
   * @returns
   */
  addToRepository(repositoryName: string, pkgSource: repositorySource) {
    this.#internalRepository[repositoryName] ||= new Repository();
    this.#internalRepository[repositoryName].set(this.createRepositoryID(), pkgSource);
    return this;
  public database: Db;
  public packageCollection: Collection<packageCollection>;
  public uploadCollection: Collection<uploadInfo>;

  public getConnections() {
    const connection = this.client["topology"];
    return {
      current: Number(connection.client.s.activeSessions?.size),
      max: Number(connection.s.options.maxConnecting),
    };
  }
}

  createRepositoryID() {
    let repoID: string;
    while (!repoID) {
      repoID = ("aptS__")+(crypto.randomBytes(16).toString("hex"));
      if (this.getRepositorys().find(key => key.repositoryManeger.has(repoID))) repoID = undefined;
    }
    return repoID;
  }

  hasSource(repositoryName: string) {
    return !!(this.#internalRepository[repositoryName]);
  }

  /**
   * Get repository source
   * @param repositoryName - Repository name or Codename
   * @returns
   */
  getRepository(repositoryName: string) {
    if (repositoryName.startsWith("aptS__")) {
      const bc = repositoryName;
      repositoryName = undefined;
      for (const repo of Object.keys(this.#internalRepository)) if (this.#internalRepository[repo].has(bc)) {repositoryName = repo; break;}
    } else if (!this.#internalRepository[repositoryName]) {
      const bc = repositoryName;
      repositoryName = undefined;
      for (const repo of Object.keys(this.#internalRepository)) if (this.#internalRepository[repo].getCodename() === bc) {repositoryName = repo; break;}
    }
    if (!repositoryName) throw new Error("Repository not exists");
    return this.#internalRepository[repositoryName];
  }

  /**
   * Delete repository
   *
   * @param repositoryName - Repository name or Codename
   * @returns return a boolean to indicate delete status
   */
  deleteRepository(repositoryName: string) {
    if (!this.#internalRepository[repositoryName]) {
      const bc = repositoryName;
      repositoryName = undefined;
      for (const repo of Object.keys(this.#internalRepository)) if (this.#internalRepository[repo].getCodename() === bc) {repositoryName = repo; break;}
      if (!repositoryName) throw new Error("Repository not exists");
    }
    return delete this.#internalRepository[repositoryName];
  }

  getRepositorys() {
    return Object.keys(this.#internalRepository).map(repositoryName => ({repositoryName, repositoryManeger: this.#internalRepository[repositoryName]}));
  }
export async function Connect(repoConfig: Config) {
  const client = await (new MongoClient(repoConfig.mongoConnection.toString())).connect();
  return new Connection(repoConfig, client);
}
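Note: taken together, the new surface is Config (a Map of repository name to Source) plus Connect for the MongoDB handle. A minimal end-to-end sketch under the new types (URL and names are illustrative):

import { Config, Source, Connect } from "./config.js"; // illustrative import path

const config = new Config({ mongoURL: "mongodb://127.0.0.1/aptStream" });
const repo = new Source({ Codename: "main" });
repo.set(undefined, { type: "http", url: "https://example.com/pool/example_1.0.0_amd64.deb" }); // ID auto-generated
config.set("main", repo);

const connection = await Connect(config); // opens the MongoClient and the packages/uploads collections
console.log(config.toString());           // YAML form of the whole config
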
@ -1,766 +0,0 @@
|
||||
import { Repository, aptStreamConfig, repositorySource } from "./config.js";
|
||||
import connectDb, { packageManeger } from "./packages.js";
|
||||
import { googleDriver } from "@sirherobrine23/cloud";
|
||||
import { readFile, readdir } from "fs/promises";
|
||||
import { apt } from "@sirherobrine23/dpkg";
|
||||
import inquirerFileTreeSelection from "inquirer-file-tree-selection-prompt";
|
||||
import dockerRegistry from "@sirherobrine23/docker-registry";
|
||||
import coreHTTP from "@sirherobrine23/http";
|
||||
import inquirer from "inquirer";
|
||||
import path from "node:path";
|
||||
import { MongoClient } from "mongodb";
|
||||
import { createServer } from "http";
|
||||
import { githubToken } from "@sirherobrine23/http/src/github.js";
|
||||
import dns from "node:dns/promises";
|
||||
import os from "os";
|
||||
inquirer.registerPrompt("file-tree-selection", inquirerFileTreeSelection);
|
||||
|
||||
export default async function main(configOrigin: string) {
|
||||
const config = new aptStreamConfig(configOrigin);
|
||||
if (!config.databaseAvaible()) await setDatabse(config);
|
||||
const configManeger = await connectDb(config);
|
||||
while(true) {
|
||||
const action = (await inquirer.prompt<{initAction: "serverEdit"|"newRepo"|"del"|"editRepo"|"syncRepo"|"exit"}>({
|
||||
name: "initAction",
|
||||
type: "list",
|
||||
message: "Select action:",
|
||||
choices: [
|
||||
{
|
||||
name: "Edit repository",
|
||||
value: "editRepo"
|
||||
},
|
||||
{
|
||||
name: "Create new repository",
|
||||
value: "newRepo"
|
||||
},
|
||||
{
|
||||
name: "Delete repository",
|
||||
value: "del"
|
||||
},
|
||||
{
|
||||
name: "Edit server configs",
|
||||
value: "serverEdit"
|
||||
},
|
||||
{
|
||||
name: "Exit",
|
||||
value: "exit"
|
||||
}
|
||||
]
|
||||
})).initAction;
|
||||
if (action === "exit") break;
|
||||
else if (action === "newRepo") {
|
||||
const repoName = (await inquirer.prompt({
|
||||
name: "repoName",
|
||||
message: "Repository name:",
|
||||
type: "input",
|
||||
validate: (name) => configManeger.hasSource(name.trim()) ? "Type other repository name, this are exist's" : true,
|
||||
})).repoName.trim();
|
||||
if (repoName) await editRepository(configManeger.createRepository(repoName), configManeger);
|
||||
else console.log("The repository was not created, cancelling!");
|
||||
} else if (action === "editRepo") {
|
||||
const repo = configManeger.getRepositorys();
|
||||
const repoSelected = (await inquirer.prompt({
|
||||
name: "repo",
|
||||
message: "Selecte repository:",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "Cancel",
|
||||
value: "exit"
|
||||
},
|
||||
...(repo.map(d => d.repositoryName))
|
||||
],
|
||||
})).repo;
|
||||
if (repoSelected !== "exit") await editRepository(configManeger.getRepository(repoSelected), configManeger);
|
||||
} else if (action === "del") {
|
||||
const repo = configManeger.getRepositorys();
|
||||
const repoSelected = (await inquirer.prompt({
|
||||
name: "repo",
|
||||
message: "Selecte repository:",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "Cancel",
|
||||
value: "exit"
|
||||
},
|
||||
...(repo.map(d => d.repositoryName))
|
||||
],
|
||||
})).repo;
|
||||
if (repoSelected !== "exit") {
|
||||
if (configManeger.deleteRepository(repoSelected)) console.log("Repository deleted");
|
||||
else console.error("Fail to delete repository!");
|
||||
}
|
||||
} else if (action === "serverEdit") await serverConfig(configManeger);
|
||||
await configManeger.saveConfig().catch(() => {});
|
||||
}
|
||||
|
||||
return configManeger.close().then(async () => configManeger.saveConfig());
|
||||
}
|
||||
|
||||
async function serverConfig(config: packageManeger) {
|
||||
while (true) {
|
||||
await config.saveConfig().catch(() => {});
|
||||
const { action } = await inquirer.prompt({
|
||||
name: "action",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "Serve port",
|
||||
value: "serverPort"
|
||||
},
|
||||
{
|
||||
name: "Serve threads forks",
|
||||
value: "serverThreads"
|
||||
},
|
||||
{
|
||||
name: "Change mongodb URL",
|
||||
value: "updateDB"
|
||||
},
|
||||
{
|
||||
name: "Switch gzip release compressions",
|
||||
value: "relGzip"
|
||||
},
|
||||
{
|
||||
name: "Switch xz release compressions",
|
||||
value: "relXz"
|
||||
},
|
||||
{
|
||||
name: "Return",
|
||||
value: "exit"
|
||||
},
|
||||
]
|
||||
});
|
||||
if (action === "exit") break;
|
||||
else if (action === "relGzip") console.log("Set gzip to %O", config.setCompressRelease("gzip", !config.getCompressRelease("gzip")).getCompressRelease("gzip"));
|
||||
else if (action === "relXz") console.log("Set xz to %O", config.setCompressRelease("xz", !config.getCompressRelease("xz")).getCompressRelease("xz"));
|
||||
else if (action === "serverPort") {
|
||||
await inquirer.prompt({
|
||||
name: "port",
|
||||
type: "number",
|
||||
default: config.getPortListen(),
|
||||
message: "Server port:",
|
||||
validate(input: number) {
|
||||
if (input < 0 || input > 65535) return "Port must be between 0 and 65535";
|
||||
config.setPortListen(input);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
} else if (action === "serverThreads") {
|
||||
await inquirer.prompt({
|
||||
name: "threads",
|
||||
type: "number",
|
||||
default: config.getClusterForks(),
|
||||
message: "Server threads forks:",
|
||||
validate(input: number) {
|
||||
if (input < 0) return "Threads must be greater or equal 0";
|
||||
if (input > os.availableParallelism()) console.warn("\nThe number of threads was greater than the system can handle, be careful!");
|
||||
config.setClusterForks(input);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
} else if (action === "updateDB") await setDatabse(config);
|
||||
}
|
||||
}
|
||||
|
||||
async function setDatabse(repo: aptStreamConfig) {
|
||||
const promps = await inquirer.prompt({
|
||||
name: "url",
|
||||
type: "input",
|
||||
message: "Mongodb URL:",
|
||||
async validate(input) {
|
||||
try {
|
||||
await (await (new MongoClient(input)).connect()).close(true);
|
||||
return true;
|
||||
} catch (err) {
|
||||
return err?.message || err;
|
||||
}
|
||||
},
|
||||
});
|
||||
repo.setDatabse(promps.url);
|
||||
}
|
||||
|
||||
async function editRepository(repo: Repository, configManeger: packageManeger) {
|
||||
let exitShowSync = false;
|
||||
await configManeger.saveConfig().catch(() => {});
|
||||
while (true) {
|
||||
const action = (await inquirer.prompt({
|
||||
name: "action",
|
||||
message: "Repository actions:",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "New repository sources",
|
||||
value: "add"
|
||||
},
|
||||
{
|
||||
name: "Delete sources",
|
||||
value: "del"
|
||||
},
|
||||
{
|
||||
name: "Delete all sources",
|
||||
value: "delAll"
|
||||
},
|
||||
{
|
||||
name: "Set repository codename",
|
||||
value: "setCodename"
|
||||
},
|
||||
{
|
||||
name: "Set repository description",
|
||||
value: "setDescription"
|
||||
},
|
||||
{
|
||||
name: "Set repository label",
|
||||
value: "setLabel"
|
||||
},
|
||||
{
|
||||
name: "Set repository origin",
|
||||
value: "setOrigin"
|
||||
},
|
||||
{
|
||||
name: "Set repository suite",
|
||||
value: "setSuite"
|
||||
},
|
||||
{
|
||||
name: "Return",
|
||||
value: "exit"
|
||||
},
|
||||
]
|
||||
})).action;
|
||||
|
||||
if (action === "exit") break;
|
||||
else if (action === "setCodename") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "What is new Codename?"
|
||||
})).value;
|
||||
repo.setCodename(input);
|
||||
} else if (action === "setDescription") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "Set description in one single line:"
|
||||
})).value;
|
||||
repo.setDescription(input);
|
||||
} else if (action === "setLabel") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "Set label:"
|
||||
})).value;
|
||||
repo.setLabel(input);
|
||||
} else if (action === "setOrigin") {
|
||||
const input = (await inquirer.prompt([
|
||||
{
|
||||
when: () => !!repo.getOrigin(),
|
||||
name: "confirm",
|
||||
message: "Are you sure you want to change Origin?",
|
||||
type: "confirm",
|
||||
default: false
|
||||
},
|
||||
{
|
||||
when: (ask) => !repo.getOrigin() || ask["confirm"],
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "What is Origin name?"
|
||||
}
|
||||
])).value;
|
||||
if (!input) console.log("Canceled set origin"); else repo.setOrigin(input);
|
||||
} else if (action === "setSuite") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "What is Suite name?"
|
||||
})).value;
|
||||
repo.setSuite(input);
|
||||
} else if (action === "delAll") {
|
||||
exitShowSync = true;
|
||||
repo.clear();
|
||||
} else if (action === "del") {
|
||||
const srcs = repo.getAllRepositorys();
|
||||
if (!srcs.length) {
|
||||
console.info("Not sources!");
|
||||
continue;
|
||||
}
|
||||
const sel: string[] = (await inquirer.prompt({
|
||||
name: "sel",
|
||||
type: "checkbox",
|
||||
message: "Select IDs:",
|
||||
choices: repo.getAllRepositorys().map(d => ({name: `${d.repositoryID} (${d.type})`, value: d.repositoryID})),
|
||||
})).sel;
|
||||
exitShowSync = true;
|
||||
sel.forEach(id => repo.delete(id));
|
||||
} else if (action === "add") {
|
||||
const root = async () => createSource().catch(err => {
|
||||
console.error(err);
|
||||
console.log("Try again");
|
||||
return createSource();
|
||||
});
|
||||
repo.set(configManeger.createRepositoryID(), await root());
|
||||
}
|
||||
}
|
||||
if (exitShowSync) console.info("Sync packages!");
|
||||
return repo;
|
||||
}
|
||||
|
||||
async function createSource(): Promise<repositorySource> {
|
||||
let { srcType, componentName } = (await inquirer.prompt<{srcType: repositorySource["type"], componentName?: string}>([
|
||||
{
|
||||
name: "srcType",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
value: "http",
|
||||
name: "HTTP Directly"
|
||||
},
|
||||
{
|
||||
value: "mirror",
|
||||
name: "APT Mirror"
|
||||
},
|
||||
{
|
||||
value: "github",
|
||||
name: "Github Release/Branch"
|
||||
},
|
||||
{
|
||||
value: "googleDriver",
|
||||
name: "Google Drive"
|
||||
},
|
||||
{
|
||||
value: "oracleBucket",
|
||||
name: "Oracle Cloud Infracture Bucket"
|
||||
},
|
||||
{
|
||||
value: "docker",
|
||||
name: "OCI (Open Container Iniciative)/Docker Image"
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
type: "confirm",
|
||||
name: "addComp",
|
||||
message: "Add component name?",
|
||||
default: false
|
||||
},
|
||||
{
|
||||
name: "componentName",
|
||||
when: (answers) => answers["addComp"],
|
||||
type: "input",
|
||||
default: "main",
|
||||
validate: (inputComp) => (/[\s]+/).test(inputComp) ? "Remove Spaces" : true
|
||||
}
|
||||
]));
|
||||
componentName ||= "main";
|
||||
if (srcType === "http") {
|
||||
return {
|
||||
type: "http", componentName,
|
||||
url: (await inquirer.prompt({
|
||||
name: "reqUrl",
|
||||
type: "input",
|
||||
async validate(urlInput) {
|
||||
try {
|
||||
const { hostname } = new URL(urlInput);
|
||||
await dns.resolve(hostname);
|
||||
return true
|
||||
} catch (err) { return err?.message || String(err); }}
|
||||
})).reqUrl,
|
||||
};
|
||||
} else if (srcType === "mirror") {
|
||||
const promps = (await inquirer.prompt([
|
||||
{
|
||||
type: "list",
|
||||
name: "sourceFrom",
|
||||
choices: [
|
||||
{name: "Select file", value: "fileSelect"},
|
||||
{name: "Create from scrat", value: "createIt"}
|
||||
]
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["sourceFrom"] === "fileSelect",
|
||||
name: "fileSource",
|
||||
type: "file-tree-selection",
|
||||
message: "Select file source path:"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["sourceFrom"] !== "fileSelect",
|
||||
name: "fileSource",
|
||||
type: "editor",
|
||||
message: "creating sources",
|
||||
default: "# This is comment\ndeb http://example.com example main",
|
||||
}
|
||||
]));
|
||||
|
||||
return {
|
||||
type: "mirror", componentName,
|
||||
config: (apt.parseSourceList(promps["sourceFrom"] !== "fileSelect" ? promps["fileSource"] : await readFile(promps["fileSource"], "utf8"))).filter(src => src.type === "packages")
|
||||
};
|
||||
} else if (srcType === "github") {
|
||||
const promps = await inquirer.prompt([
|
||||
{
|
||||
name: "token",
|
||||
type: "input",
|
||||
message: "Github token to private repositorys (it is not necessary if it is public):",
|
||||
default: githubToken,
|
||||
validate(input: string) {
|
||||
if (input.length > 0) if (!(input.startsWith("ghp_"))) return "Invalid token, if old token set manualy in Config file!";
|
||||
return true;
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "owner",
|
||||
type: "input",
|
||||
message: "Repository owner:",
|
||||
async validate(input, ask) {
|
||||
try {
|
||||
const apiReq = new URL(path.posix.join("/users", path.posix.resolve("/", input)), "https://api.github.com");
|
||||
await coreHTTP.jsonRequestBody(apiReq, {headers: ask.token ? {Authorization: `token ${ask.token}`}:{}});
|
||||
return true;
|
||||
} catch (err) {
|
||||
return err?.body?.message || err?.message || String(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "repository",
|
||||
type: "list",
|
||||
message: "Select repository:",
|
||||
async choices(answers) {
|
||||
const apiReq = new URL(path.posix.join("/users", answers["owner"], "repos"), "https://api.github.com");
|
||||
return (await coreHTTP.jsonRequestBody<{name: string}[]>(apiReq, {headers: answers.token ? {Authorization: `token ${answers.token}`}:{}})).map(({name}) => name);
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "subType",
|
||||
type: "list",
|
||||
message: "Where to get the .deb files?",
|
||||
choices: [
|
||||
"Release",
|
||||
"Branch"
|
||||
]
|
||||
}
|
||||
]);
|
||||
|
||||
const { owner, repository, token } = promps;
|
||||
if (promps["subType"] === "Branch") {
|
||||
return {
|
||||
type: "github", subType: "branch", componentName, enableUpload: false,
|
||||
owner, repository, token,
|
||||
branch: (await inquirer.prompt({
|
||||
name: "branch",
|
||||
type: "list",
|
||||
message: "Select the branch:",
|
||||
async choices() {
|
||||
const apiReq = new URL(path.posix.join("/repos", owner, repository, "branches"), "https://api.github.com");
|
||||
return (await coreHTTP.jsonRequestBody<{name: string}[]>(apiReq)).map(({name}) => name);
|
||||
}
|
||||
})).branch,
|
||||
};
|
||||
}
|
||||
const { tag, enableUpload } = await inquirer.prompt([
|
||||
{
|
||||
when: () => !!token,
|
||||
type: "confirm",
|
||||
name: "enableUpload",
|
||||
message: "Enable support to upload files to Github Release?",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
name: "tag",
|
||||
type: "checkbox",
|
||||
message: "Select tags:",
|
||||
async choices() {
|
||||
const apiReq = new URL(path.posix.join("/repos", owner, repository, "releases"), "https://api.github.com");
|
||||
return (await coreHTTP.jsonRequestBody<{tag_name: string}[]>(apiReq)).map(({tag_name}) => tag_name);
|
||||
}
|
||||
},
|
||||
]);
|
||||
return {
|
||||
type: "github", subType: "release", componentName, enableUpload,
|
||||
owner, repository, token,
|
||||
tag,
|
||||
}
|
||||
} else if (srcType === "googleDriver") {
|
||||
let client_id: string, client_secret: string;
|
||||
try {
|
||||
const secretFile = (await readdir(process.cwd()).then(files => files.filter(file => file.endsWith(".json") && file.startsWith("client_secret")))).at(0);
|
||||
if (secretFile) {
|
||||
const cbb = JSON.parse(await readFile(secretFile, "utf8"));
|
||||
if (typeof cbb.installed === "object") {
|
||||
client_id = cbb.installed.client_id;
|
||||
client_secret = cbb.installed.client_secret;
|
||||
} else if (typeof cbb.CBe === "object") {
|
||||
client_id = cbb.CBe.client_id;
|
||||
client_secret = cbb.CBe.client_secret;
|
||||
} else if (typeof cbb.client_id === "string" && typeof cbb.client_secret === "string") {
|
||||
client_id = cbb.client_id;
|
||||
client_secret = cbb.client_secret;
|
||||
}
|
||||
}
|
||||
} catch {}
|
||||
|
||||
const clientPromp = await inquirer.prompt([
|
||||
{
|
||||
type: "input",
|
||||
name: "id",
|
||||
message: "Google oAuth Client ID:",
|
||||
default: client_id
|
||||
},
|
||||
{
|
||||
type: "input",
|
||||
name: "secret",
|
||||
message: "Google oAuth Client Secret:",
|
||||
default: client_secret,
|
||||
},
|
||||
{
|
||||
type: "confirm",
|
||||
name: "enableUpload",
|
||||
message: "Enable support to upload files to Google driver?",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
name: "listFiles",
|
||||
type: "confirm",
|
||||
message: "After authenticating Google Drive, will you want to select the files?"
|
||||
},
|
||||
{
|
||||
when: (ask) => ask["listFiles"],
|
||||
name: "folderID",
|
||||
type: "input",
|
||||
message: "Folder ID?"
|
||||
}
|
||||
]);
|
||||
let clientToken: any;
|
||||
const server = createServer();
|
||||
const port = await new Promise<number>((resolve, reject) => {
|
||||
server.once("error", reject);
|
||||
server.listen(0, () => {
|
||||
const addr = server.address();
|
||||
server.removeListener("error", reject);
|
||||
resolve(Number((typeof addr === "string" ? addr : addr?.port) || addr));
|
||||
});
|
||||
});
|
||||
const gdrive = await googleDriver.GoogleDriver({
|
||||
authConfig: {
|
||||
clientSecret: clientPromp["secret"],
|
||||
clientID: clientPromp["id"],
|
||||
redirectURL: "http://localhost:" + port,
|
||||
authUrlCallback(authUrl, callback) {
|
||||
server.once("request", function call(req, res) {
|
||||
const { searchParams } = new URL(String(req.url), "http://localhost:"+port);
|
||||
if (!searchParams.has("code")) {
|
||||
res.statusCode = 400;
|
||||
res.end("No code");
|
||||
server.once("request", call);
|
||||
return;
|
||||
}
|
||||
res.statusCode = 200;
|
||||
res.end(searchParams.get("code"));
|
||||
callback(searchParams.get("code"))
|
||||
});
|
||||
console.error("Please open the following URL in your browser:", authUrl);
|
||||
},
|
||||
tokenCallback(token) {
|
||||
clientToken = token;
|
||||
console.log("Google Drive token:", token);
|
||||
},
|
||||
}
|
||||
});
|
||||
server.close();
|
||||
let gIDs: string[];
|
||||
if (clientPromp["listFiles"]) {
|
||||
const folderID = clientPromp["folderID"]||undefined;
|
||||
const files = (await gdrive.listFiles(folderID)).filter(file => file.name.endsWith(".deb"));
|
||||
if (files.length <= 0) console.log("No files currently in you drive");
|
||||
else gIDs = (await inquirer.prompt({
|
||||
name: "ids",
|
||||
type: "checkbox",
|
||||
choices: files.map(file => ({name: file.name, value: file.id, checked: true}))
|
||||
})).ids;
|
||||
}
|
||||
|
||||
return {
|
||||
type: "googleDriver", componentName, enableUpload: clientPromp["enableUpload"],
|
||||
clientSecret: clientPromp["secret"],
|
||||
clientId: clientPromp["id"],
|
||||
clientToken,
|
||||
gIDs
|
||||
};
|
||||
} else if (srcType === "oracleBucket") {
|
||||
const ociPromps = await inquirer.prompt([
|
||||
{
|
||||
name: "namespace",
|
||||
type: "input",
|
||||
message: "OCI Bucket namespace:"
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
type: "input",
|
||||
message: "Bucket name:"
|
||||
},
|
||||
{
|
||||
name: "region",
|
||||
type: "list",
|
||||
message: "Select Bucket region:",
|
||||
choices: [
|
||||
"af-johannesburg-1",
|
||||
"ap-chuncheon-1",
|
||||
"ap-hyderabad-1",
|
||||
"ap-melbourne-1",
|
||||
"ap-mumbai-1",
|
||||
"ap-osaka-1",
|
||||
"ap-seoul-1",
|
||||
"ap-singapore-1",
|
||||
"ap-sydney-1",
|
||||
"ap-tokyo-1",
|
||||
"ca-montreal-1",
|
||||
"ca-toronto-1",
|
||||
"eu-amsterdam-1",
|
||||
"eu-frankfurt-1",
|
||||
"eu-madrid-1",
|
||||
"eu-marseille-1",
|
||||
"eu-milan-1",
|
||||
"eu-paris-1",
|
||||
"eu-stockholm-1",
|
||||
"eu-zurich-1",
|
||||
"il-jerusalem-1",
|
||||
"me-abudhabi-1",
|
||||
"me-jeddah-1",
|
||||
"mx-queretaro-1",
|
||||
"sa-santiago-1",
|
||||
"sa-saopaulo-1",
|
||||
"sa-vinhedo-1",
|
||||
"uk-cardiff-1",
|
||||
"uk-london-1",
|
||||
"us-ashburn-1",
|
||||
"us-chicago-1",
|
||||
"us-phoenix-1",
|
||||
"us-sanjose-1"
|
||||
]
|
||||
},
|
||||
{
|
||||
name: "authType",
|
||||
type: "list",
|
||||
choices: [
|
||||
{name: "OCI Cli config", value: "preAuthentication"},
|
||||
{name: "User", value: "user"},
|
||||
]
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "tenancy",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "user",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "fingerprint",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "privateKey",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "passphase",
|
||||
type: "confirm",
|
||||
message: "Private key require password to decrypt?"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["passphase"],
|
||||
name: "passphase",
|
||||
type: "password",
|
||||
mask: "*"
|
||||
},
|
||||
{
|
||||
type: "confirm",
|
||||
name: "enableUpload",
|
||||
message: "Enable support to upload files?",
|
||||
default: false,
|
||||
}
|
||||
]);
    const { namespace, name, region, enableUpload } = ociPromps;
    // "preAuthentication" relies on the local OCI CLI configuration instead of explicit keys.
    if (ociPromps["authType"] === "preAuthentication") return {
      type: "oracleBucket", componentName, enableUpload,
      authConfig: {
        namespace, name, region
      }
    };
    const { fingerprint, privateKey, tenancy, user, passphase } = ociPromps;
    return {
      type: "oracleBucket", componentName, enableUpload,
      authConfig: {
        namespace, name, region,
        auth: {
          fingerprint, privateKey, tenancy, user, passphase
        }
      }
    };
  } else if (srcType === "docker") {
    const basicConfig = await inquirer.prompt<{authConfirm: boolean, imageURI: string}>([
      {
        name: "imageURI",
        type: "input",
        message: "Image URI/URL:",
        validate(input) {
          try {
            new dockerRegistry.parseImage(input);
            return true;
          } catch (err) {
            return String(err?.message || err);
          }
        },
      },
      {
        name: "authConfirm",
        type: "confirm",
        message: "Does this registry or image require authentication?",
        default: false
      }
    ]);
    let auth: dockerRegistry.userAuth;
    let enableUpload: boolean = false;
    if (basicConfig.authConfirm) {
      const authPrompts = await inquirer.prompt([
        {
          name: "user",
          type: "input",
          message: "Username:",
          validate(input: string) {
            if (input.trim().length > 1) return true;
            return "Invalid username";
          }
        },
        {
          name: "pass",
          type: "password",
          mask: "*",
          message: "Password or Token:"
        },
        {
          name: "enableUpload",
          type: "confirm",
          message: "Allow publishing packages to the Docker registry?"
        }
      ]);
      enableUpload = authPrompts["enableUpload"];
      auth = {
        username: authPrompts.user,
        password: authPrompts.pass
      };
    }

    return {
      type: "docker", componentName, enableUpload,
      image: basicConfig.imageURI,
      auth
    };
  }

  console.log("Invalid source type!");
  return createSource();
}
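
// For reference, a "docker" selection above resolves to a repositorySource-shaped
// object such as the following (hypothetical values):
//   { type: "docker", componentName: "main", enableUpload: false,
//     image: "ghcr.io/owner/image:latest", auth: { username: "user", password: "token" } }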
src/index.ts
@@ -1,445 +1,17 @@
#!/usr/bin/env node
import "./log.js";
import path from "node:path";
import yargs from "yargs";
import crypto from "node:crypto";
import cluster from "node:cluster";
import packages from "./packages.js";
import express from "express";
import expressRate from "express-rate-limit";
import streamPromise from "node:stream/promises";
import configManeger from "./configManeger.js";
import * as Debian from "@sirherobrine23/dpkg";
import oldFs, { createReadStream, promises as fs } from "node:fs";
import { aptStreamConfig } from "./config.js";
import { dockerRegistry } from "@sirherobrine23/docker-registry";
import { extendsFS } from "@sirherobrine23/extends";
import { dpkg } from "@sirherobrine23/dpkg";
import { Config, Connect, Source, generateGPG } from "./config.js";
import { createRoute } from "./server.js";
// import yargs from "yargs";
// const terminalSize = typeof process.stdout.getWindowSize === "function" ? process.stdout.getWindowSize()[0] : null;
// yargs(process.argv.slice(2)).wrap(terminalSize).version(false).help(true).alias("h", "help").strictOptions();

// Set yargs config
const terminalSize = typeof process.stdout.getWindowSize === "function" ? process.stdout.getWindowSize()[0] : null;
yargs(process.argv.slice(2)).wrap(terminalSize).version(false).help(true).alias("h", "help").strictCommands().demandCommand()
const initialConfig = new Config();
const gpg = await generateGPG();
initialConfig.publicGPG = gpg.publicKey;
initialConfig.privateGPG = gpg.privateKey;
initialConfig.set("google", new Source());

// Edit/print configs interactive mode
.command(["config", "maneger", "$0"], "Manage config", yargs => yargs.option("config", {
  string: true,
  alias: "c",
  type: "string",
  description: "Config file path",
  default: "aptStream.yml",
}).option("print", {
  description: "Print the config to stdout and select the target format. Default is yaml",
  alias: "p",
  array: false,
  string: true,
  choices: [
    "", // if set only "--print"
    "yaml", "yml", "json", // without encode
    "yaml64", "yml64", "json64", // encode in base64
    "yamlhex", "ymlhex", "jsonhex", // encode in hexadecimal (hex)
  ],
}), async options => {
  if (options.print !== undefined) {
    let out = String(options.print);
    if (typeof options.print === "boolean"||options.print === "") out = "yaml";
    const config = new aptStreamConfig(options.config);
    const target = out.startsWith("json") ? "json" : "yaml", encode = out.endsWith("64") ? "base64" : out.endsWith("hex") ? "hex" : "utf8";
    return console.log((config.toString(encode, target)));
  }
  if (!process.stdin.isTTY) throw new Error("Run with a TTY to manage the config!");
  return configManeger(options.config);
})
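// Usage sketch (assuming the published bin is invoked as `apt-stream`):
//   apt-stream config -c aptStream.yml --print json64
// prints the whole config as base64-encoded JSON, handy for piping into secrets.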
const db = await Connect(initialConfig);
const app = await createRoute(db);

// Sync repository packages
.command(["sync", "synchronize"], "Sync packages directly from the CLI", yargs => yargs.option("config", {
  string: true,
  alias: "c",
  type: "string",
  description: "Config file path",
  default: "aptStream.yml",
}).option("verbose", {
  type: "boolean",
  boolean: true,
  description: "Enable verbose errors",
  default: false,
  alias: ["v", "vv", "dd"]
}), async options => {
  console.log("Starting...");
  const packageManeger = await packages(options.config);
  let i = 0;
  await packageManeger.syncRepositorys((err, db) => {
    process.stdout.moveCursor(0, -1);
    console.log("Packages loaded %f", i++);
    if (!!err) {
      if (options.verbose) return console.error(err);
      return console.error(err.message || err);
    }
    console.log("Added %s: %s/%s (%s)", db.repositoryID, db.controlFile.Package, db.controlFile.Architecture, db.controlFile.Version);
  });
  console.log("End!");
  return packageManeger.close();
})

// Pack debian package
.command(["pack", "pack-deb", "create", "c"], "Create package", yargs => yargs.option("package-path", {
  type: "string",
  string: true,
  alias: "s",
  default: process.cwd(),
  description: "Debian package source",
}).option("output", {
  type: "string",
  string: true,
  alias: "o",
}).option("compress", {
  type: "string",
  string: true,
  alias: [
    "data-compress",
    "c"
  ],
  description: "data.tar compression format",
  default: "gzip",
  choices: [
    "passThrough",
    "gzip",
    "zst",
    "xz",
  ]
}).option("control-compress", {
  type: "string",
  string: true,
  description: "control.tar compression format",
  alias: [
    "d"
  ],
  default: "gzip",
  choices: [
    "gzip",
    "passThrough",
    "xz"
  ]
}), async options => {
  let debianConfig: string;
  if (!(await extendsFS.exists(debianConfig = path.resolve(process.cwd(), options.packagePath, "DEBIAN"))||await extendsFS.exists(debianConfig = path.resolve(process.cwd(), options.packagePath, "debian")))) throw new Error("Create a valid package structure!");
  if (!(await extendsFS.exists(path.join(debianConfig, "control")))) throw new Error("The control file is required");
  const control = dpkg.parseControl(await fs.readFile(path.join(debianConfig, "control")));
  if (!options.output) options.output = path.join(process.cwd(), `${control.Package}_${control.Architecture}_${control.Version}.deb`); else options.output = path.resolve(process.cwd(), options.output);
  const scriptsFile = (await fs.readdir(debianConfig)).filter(file => (["preinst", "prerm", "postinst", "postrm"]).includes(file));

  console.log("Creating debian package");
  await streamPromise.finished(dpkg.createPackage({
    control,
    dataFolder: path.resolve(debianConfig, ".."),
    compress: {
      data: options.compress as any||"gzip",
      control: options.controlCompress as any||"gzip",
    },
    scripts: scriptsFile.reduce<dpkg.packageConfig["scripts"]>((acc, file) => {acc[file] = path.join(debianConfig, file); return acc;}, {})
  }).pipe(oldFs.createWriteStream(options.output)));
  console.log("File saved %O", options.output);
})
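// e.g. (bin name assumed): `apt-stream pack -s ./my-pkg -o my-pkg.deb --compress xz`
// builds my-pkg.deb with an xz-compressed data.tar and a gzip control.tar.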

// Upload to registry
.command(["upload", "u"], "Upload a package to a repository that allows uploads", yargs => yargs.strictCommands(false).option("config", {
  string: true,
  alias: "c",
  type: "string",
  description: "Config file path",
  default: "aptStream.yml",
}).option("repositoryID", {
  type: "string",
  string: true,
  alias: ["repoID", "id", "i"],
  demandOption: true,
  description: "Repository to upload files to"
}).option("tag", {
  type: "string",
  string: true,
  description: "Docker/Github release tag name",
  alias: ["dockerTAG", "ociTAG", "oci_tag", "release_tag"]
}), async options => {
  const files = options._.slice(1).map((file: string) => path.resolve(process.cwd(), file));
  if (!files.length) throw new Error("At least one file is required to upload");
  const config = new aptStreamConfig(options.config);
  if (!(config.getRepository(options.repositoryID).get(options.repositoryID)).enableUpload) throw new Error("Repository does not support file uploads!");
  const up = await config.getRepository(options.repositoryID).uploadFile(options.repositoryID);
  if (up.githubUpload) {
    for (const filePath of files) {
      if (!(await extendsFS.exists(filePath))) {console.error("%O does not exist!", filePath); continue;}
      const stats = await fs.lstat(filePath);
      const filename = path.basename(filePath);
      await streamPromise.finished(createReadStream(filePath).pipe(await up.githubUpload(filename, stats.size, options.tag)));
    }
  } else if (up.gdriveUpload) {
    for (const filePath of files) {
      if (!(await extendsFS.exists(filePath))) {console.error("%O does not exist!", filePath); continue;}
      const filename = path.basename(filePath);
      await streamPromise.finished(createReadStream(filePath).pipe(await up.gdriveUpload(filename)));
    }
  } else if (up.ociUpload) {
    for (const filePath of files) {
      if (!(await extendsFS.exists(filePath))) {console.error("%O does not exist!", filePath); continue;}
      const filename = path.basename(filePath);
      await streamPromise.finished(createReadStream(filePath).pipe(await up.ociUpload(filename)));
    }
  } else if (up.dockerUpload) {
    for (const filePath of files) {
      if (!(await extendsFS.exists(filePath))) {console.error("%O does not exist!", filePath); continue;}
      const { controlFile } = await dpkg.parsePackage(createReadStream(filePath));
      const filename = path.basename(filePath);
      const tr = await up.dockerUpload(dockerRegistry.debianArchToDockerPlatform(controlFile.Architecture));
      tr.annotations.set("org.opencontainers.image.description", controlFile.Description);
      tr.annotations.set("org.opencontainers.image.version", controlFile.Version);
      tr.annotations.set("org.sirherobrine23.aptstream.control", JSON.stringify(controlFile));
      tr.annotations.set("com.github.package.type", "aptstream_package");
      await streamPromise.finished(createReadStream(filePath).pipe(tr.addEntry({
        name: filename,
        type: "file",
        size: (await fs.lstat(filePath)).size
      })));
      const img_info = await tr.finalize(options.tag||controlFile.Version);
      console.log("Image digest: %O", img_info.digest);
    }
  }
  await config.saveConfig().catch(() => {});
})

// APT Server
.command(["server", "serve", "s"], "Run the http server", yargs => yargs.option("config", {
  string: true,
  alias: "c",
  type: "string",
  description: "Config file path",
  default: "aptStream.yml"
}).option("port", {
  number: true,
  alias: "p",
  type: "number",
  description: "Alternative port to run the http server on"
}).option("cluster", {
  number: true,
  type: "number",
  description: "Enable cluster mode for performance",
  alias: "t"
}).option("data", {
  string: true,
  alias: "C",
  type: "string",
  description: "data files folder"
}).option("db", {
  string: true,
  type: "string",
  alias: "d",
  description: "database url"
}).option("auto-sync", {
  type: "boolean",
  boolean: true,
  alias: "z",
  default: false,
  description: "Enable background package sync"
}).option("disable-release-compress", {
  type: "boolean",
  boolean: true,
  default: false,
  description: "Disable generating Packages.gz and Packages.xz hashes for the Release file",
  alias: "L"
}), async options => {
  let packageManegerInit = new aptStreamConfig(options.config);
  if (!!options.data) packageManegerInit.setDataStorage(options.data);
  if (!!options.port) packageManegerInit.setPortListen(options.port);
  if (!!options.db) packageManegerInit.setDatabse(options.db);
  if (!!options["disable-release-compress"]) packageManegerInit.setCompressRelease("gzip", false).setCompressRelease("xz", false);
  if (!!options.cluster && options.cluster > 0) packageManegerInit.setClusterForks(options.cluster);
  const packageManeger = await packages(packageManegerInit);
  let forks = packageManeger.getClusterForks();
  if (cluster.isPrimary) {
    if (!!(options.autoSync ?? options["auto-sync"])) (async () => {
      while (true) {
        console.info("Starting package sync!");
        await packageManeger.syncRepositorys((_err, db) => {
          if (!db) return;
          const {repositoryID, controlFile: { Package, Architecture, Version }} = db;
          console.log("Sync/Add: %s -> %s/%s (%s)", repositoryID, Package, Architecture, Version);
        });
        console.log("Next sync after 30 Minutes");
        await new Promise(done => setTimeout(done, 1800000));
      }
    })().catch(err => {
      console.info("Auto sync packages disabled!");
      console.error(err);
    });
    if (forks > 0) {
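      // Fork worker processes; a worker that dies on a signal or with a non-zero
      // exit code is restarted, giving up after five restarts per slot.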
      const forkProcess = async (count = 0): Promise<number> => new Promise((done, reject) => {
        const fk = cluster.fork();
        return fk.on("error", err => {
          console.error(err);
          return reject(err);
        }).on("online", () => done(fk.id)).once("exit", (code, signal) => {
          count++;
          if (!signal && code === 0) return console.info("Cluster %s: exited and not restarting", fk.id);
          else if (count > 5) return console.warn("Cluster reached the max retry count!");
          console.info("Cluster %s: caught %O, restarting (restart count %f)", fk.id, code||signal, count);
          return forkProcess(count);
        });
      });
      for (let i = 0; i < forks; i++) await forkProcess().then(id => console.info("Cluster %s is online", id));
      return;
    }
  }

  // Serve
  const app = express();
  app.disable("x-powered-by").disable("etag");
  app.use(express.json(), (_req, res, next) => {
    res.setHeader("cluster-id", String(cluster.isPrimary ? 1 : cluster.worker.id));
    res.json = (body) => res.setHeader("Content-Type", "application/json").send(JSON.stringify(body, null, 2)); return next();
  });

  // Serve info
  app.get("/", async ({res}) => {
    return res.json({
      cluster: cluster.worker?.id ?? 1,
      sourcesCount: packageManeger.getRepositorys().length,
      packagesRegistred: await packageManeger.packagesCount(),
      db: packageManeger.getClientInfo(),
    });
  });

  // Public key
  app.get("/public(_key|)(|.gpg|.dearmor)", async (req, res) => res.setHeader("Content-Type", req.path.endsWith(".dearmor") ? "application/octet-stream" : "text/plain").send(await packageManeger.getPublicKey(req.path.endsWith(".dearmor") ? "dearmor" : "armor")));

  // Get dists
  app.get("/dists", async ({res}) => res.json(Array.from(new Set(packageManeger.getRepositorys().map(d => d.repositoryName)))));
  app.get("/dists/:distName/info", async (req, res) => res.json(await packageManeger.repoInfo(req.params.distName)));
  app.get("/dists/(:distName)(|/InRelease|/Release(.gpg)?)?", async (req, res) => {
    const lowerPath = req.path.toLowerCase(), aptRoot = path.posix.resolve("/", path.posix.join(req.baseUrl, req.path), "../../../..");
    let Release = await packageManeger.createRelease(req.params["distName"], aptRoot);
    let releaseText: string;
    if (lowerPath.endsWith("inrelease")||lowerPath.endsWith("release.gpg")) releaseText = await Release.inRelease(req.path.endsWith(".gpg") ? "clearMessage" : "sign");
    else if (lowerPath.endsWith("release")) releaseText = Release.toString();
    else return res.json(Release.toJSON());
    return res.status(200).setHeader("Content-Type", "text/plain").setHeader("Content-Length", String(Buffer.byteLength(releaseText))).send(releaseText);
  });

  app.get("/dists/:distName/:componentName/binary-:Arch/Packages(.(gz|xz))?", async (req, res) => {
    const { distName, componentName, Arch } = req.params;
    const reqPath = req.path;
    return packageManeger.createPackage(distName, componentName, Arch, path.posix.resolve("/", path.posix.join(req.baseUrl, req.path), "../../../../../.."), {
      compress: reqPath.endsWith(".gz") ? "gz" : reqPath.endsWith(".xz") ? "xz" : undefined,
      callback: (str) => str.pipe(res.writeHead(200, {}))
    });
  });

  // Send package hashes
  app.get("/pool", async ({res}) => res.json(await packageManeger.getPackagesHash()));

  app.get("/pool/(:hash)(|/data.tar|.deb)", async (req, res) => {
    const packageID = (await packageManeger.pkgQuery({"controlFile.MD5sum": req.params.hash})).at(0);
    if (!packageID) return res.status(404).json({error: "Package does not exist"});
    if (req.path.endsWith("/data.tar")||req.path.endsWith(".deb")) {
      const str = await packageManeger.getPackageStream(packageID);
      if (req.path.endsWith(".deb")) return str.pipe(res.writeHead(200, {}));
      return (await Debian.getPackageData(str)).pipe(res.writeHead(200, {}));
    }
    return res.json({...packageID.controlFile, Filename: undefined});
  });

  // Upload file
  const uploadIDs = new Map<string, {createAt: Date, deleteAt: Date, uploading: boolean, repositoryID: string, filename: string}>();
  const uploadRoute = express.Router();
  app.use("/upload", uploadRoute);
  uploadRoute.get("/", ({res}) => res.json({available: true}));
  uploadRoute.use(expressRate({
    skipSuccessfulRequests: true,
    windowMs: 1000 * 60 * 40,
    max: 1000,
  })).post("/", async ({body, headers: { authorization }}, res) => {
    if (!authorization) return res.status(401).json({error: "The Authorization header is required"});
    else if (!(authorization.startsWith("Bearer "))) return res.status(401).json({error: "Invalid authorization schema"});
    else if (!(await packageManeger.userAs(authorization.replace("Bearer", "").trim()))) return res.status(401).json({error: "Invalid token!"});

    if (!body) return res.status(400).json({error: "JSON or YAML is required to set up the upload"});
    const { repositoryID, control } = body as {repositoryID: string, control: Debian.debianControl};
    if (!repositoryID) return res.status(400).json({error: "A repository ID is required"});
    if (!control) return res.status(400).json({error: "A debian control JSON is required"});
    const repo = packageManeger.getRepository(repositoryID).get(repositoryID);
    if (!repo.enableUpload) return res.status(401).json({message: "This repository does not support uploads or is not set up to upload files!"});
    let reqID: string;
    while (true) if (!(uploadIDs.has(reqID = crypto.randomBytes(12).toString("hex")))) break;
    const { Package: packageName, Architecture, Version } = control;
    const createAt = new Date(), deleteAt = new Date(createAt.getTime() + (1000 * 60 * 5));
    // Expire the upload session once deleteAt is reached (the delay must be positive).
    setTimeout(() => {if (uploadIDs.has(reqID)) uploadIDs.delete(reqID);}, deleteAt.getTime() - createAt.getTime());
    uploadIDs.set(reqID, {
      createAt, deleteAt,
      repositoryID,
      uploading: false,
      filename: `${packageName}_${Architecture}_${Version}.deb`,
    });
    return res.status(201).json({
      repositoryType: repo.type,
      uploadID: reqID,
      config: uploadIDs.get(reqID),
    });
  }).put("/:uploadID", async (req, res) => {
    if (!(uploadIDs.has(req.params.uploadID))) return res.status(401).json({error: "Create an uploadID first!"});
    if (uploadIDs.get(req.params.uploadID).uploading) return res.status(401).json({error: "Create a new uploadID, this one is in use"});
    else if (!((req.headers["content-type"]||"").includes("application/octet-stream"))) return res.status(400).json({error: "Send an octet-stream file"});
    else if (!(req.headers["content-length"])) return res.status(422).json({error: "The file size is required"});
    else if (Number(req.headers["content-length"]) < 10) return res.status(422).json({error: "The file is too small!"});
    uploadIDs.get(req.params.uploadID).uploading = true;
    let { repositoryID, filename } = uploadIDs.get(req.params.uploadID);

    try {
      const up = await packageManeger.getRepository(repositoryID).uploadFile(repositoryID);
      const tagName = (Array.isArray(req.query.tagName) ? req.query.tagName.at(0) : req.query.tagName)?.toString();
      if (up.githubUpload) {
        if (!tagName) res.setHeader("warning", "Using the latest github release tag!");
        await streamPromise.finished(req.pipe(await up.githubUpload(filename, Number(req.headers["content-length"]), tagName)));
        return res.status(201).json({
          type: "Github release"
        });
      } else if (up.gdriveUpload) {
        const id = (Array.isArray(req.query.id) ? req.query.id.at(0) : req.query.id)?.toString();
        await streamPromise.finished(req.pipe(await up.gdriveUpload(filename, id)));
        return res.status(201).json({
          type: "Google driver"
        });
      } else if (up.ociUpload) {
        if (typeof req.query.path === "string") filename = path.posix.resolve("/", req.query.path, filename);
        await streamPromise.finished(req.pipe(await up.ociUpload(filename)));
        return res.status(201).json({
          type: "Oracle cloud bucket",
          filename
        });
      } else if (up.dockerUpload) {
        const tar = await up.dockerUpload({
          os: "linux",
          architecture: req.query.arch||"generic" as any,
        });
        await streamPromise.finished(req.pipe(tar.addEntry({name: filename, size: Number(req.headers["content-length"])})));
        return res.status(201).json({
          type: "Docker registry",
          image: await tar.finalize(tagName),
        });
      }
      return res.status(502).json({
        message: "No upload backend is available for this repository"
      });
    } finally {
      uploadIDs.delete(req.params.uploadID);
    }
  });

  app.all("*", ({res}) => res.status(404).json({message: "Page does not exist"}));
  app.use((err, _req, res, _next) => {
    console.error(err);
    return res.status(400).json({error: err?.message || String(err)});
  }).listen(packageManeger.getPortListen(), function () {
    const address = this.address();
    console.log("Port listening on %O", typeof address === "object" ? address.port : address);
  });
}).parseAsync().catch(err => {
  console.error(err);
  process.exit(-1);
});
app.listen(3000, () => console.log("Listen on 3000"));
src/localFile.ts (new file)
@@ -0,0 +1,38 @@
import fs from "node:fs/promises";
import { createWriteStream, createReadStream } from "node:fs";
import { Connection } from "./config.js";
import { debianControl, dpkg } from "@sirherobrine23/dpkg";
import path from "node:path";
import { extendsFS } from "@sirherobrine23/extends";
import { finished } from "node:stream/promises";
import { compressStream } from "@sirherobrine23/decompress";
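
// Writes the on-disk APT index for one repository: for every component and
// architecture it emits dists/<repository>/<component>/<arch>/packages plus
// gzip- and xz-compressed copies for the Release hashes.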
export async function createPackage(db: Connection, repository: string) {
  const repo = db.repoConfig.get(repository);
  const packageArray = await db.packageCollection.find({ $and: Array.from(repo.keys()).map(i => ({ repositorys: [i] })) }).toArray();
  // Group control files by component, then by architecture.
  const cc = packageArray.reduce<{ [k: string]: { [c: string]: debianControl[] } }>((acc, info) => {
    info.repositorys.filter(rel => rel.repository === repository).forEach(repoID => {
      acc[repo.get(repoID.origim).componentName] ??= {};
      acc[repo.get(repoID.origim).componentName][info.control.Architecture] ??= [];
      acc[repo.get(repoID.origim).componentName][info.control.Architecture].push(info.control);
    });
    return acc;
  }, {});
  const repositoryRoot = path.join(db.repoConfig.tmpFolder, "dists", repository);
  for (const componentName in cc) {
    for (const arch in cc[componentName]) {
      if (!(await extendsFS.exists(path.join(repositoryRoot, componentName, arch)))) await fs.mkdir(path.join(repositoryRoot, componentName, arch), { recursive: true });
      const file = path.join(repositoryRoot, componentName, arch, "packages");
      const wr = createWriteStream(file);
      // Control stanzas are separated by a blank line, per the Packages format.
      for (const index in cc[componentName][arch]) {
        const control = cc[componentName][arch][index];
        if (Number(index) > 0) wr.write(Buffer.from("\n"));
        await new Promise<void>((done, reject) => wr.write(dpkg.createControl(control), err => err ? reject(err) : done()));
      }
      wr.close();
      await finished(wr);
      await finished(createReadStream(file).pipe(compressStream("gzip")).pipe(createWriteStream(file + ".gz")));
      await finished(createReadStream(file).pipe(compressStream("xz")).pipe(createWriteStream(file + ".xz")));
    }
  }
}
src/log.ts (deleted)
@@ -1,43 +0,0 @@
import { formatWithOptions, InspectOptions } from "node:util";
import cluster from "node:cluster";
import expressLayer from "express/lib/router/layer.js";

// Patch promise handler to express 4.x
expressLayer.prototype.handle_request = async function handle_request_promised(...args) {
  var fn = this.handle;
  if (fn.length > 3) return args.at(-1)();
  await Promise.resolve().then(() => fn.call(this, ...args)).catch(args.at(-1));
}

// Set default custom log to Cluster workers
if (cluster.isWorker) {
  const { log, error, debug, info, warn } = console;
  const { id } = cluster.worker ?? {};
  const defaultOptions: InspectOptions = {
    colors: true,
    showHidden: false,
    depth: null
  };

  console.clear = console.clear ?? function () {console.warn("cannot clear tty");}

  console.log = function(...args) {
    log("[LOG%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
  }

  console.error = function(...args) {
    error("[ERROR%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
  }

  console.debug = function(...args) {
    debug("[DEBUG%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
  }

  console.info = function(...args) {
    info("[INFO%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
  }

  console.warn = function(...args) {
    warn("[WARNING%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
  }
}
src/packages.ts (deleted)
@@ -1,462 +0,0 @@
import { aptStreamConfig, configJSON, repositorySource } from "./config.js";
import { decompressStream, compressStream } from "@sirherobrine23/decompress";
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";
import { extendsCrypto, extendsFS } from "@sirherobrine23/extends";
import { apt, dpkg } from "@sirherobrine23/dpkg";
import { tmpdir } from "node:os";
import { format } from "node:util";
import oldFs, { promises as fs } from "node:fs";
import coreHTTP, { Github } from "@sirherobrine23/http";
import streamPromise, { finished } from "node:stream/promises";
import dockerRegistry from "@sirherobrine23/docker-registry";
import mongoDB from "mongodb";
import openpgp from "openpgp";
import stream from "node:stream";
import crypto from "node:crypto";
import path from "node:path";

export interface dbStorage {
  repositoryID: string;
  restoreFile: any;
  controlFile: dpkg.debianControl;
}

export interface userAuth {
  createAt: Date;
  username: string;
  token: string[];
}

export default async function main(initConfig: string|configJSON|aptStreamConfig) {
  return new Promise<packageManeger>((done, reject) => {
    const pkg = new packageManeger(initConfig, (err) => {
      if (err) return reject(err);
      return done(pkg);
    });
  });
}
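// e.g. `const manager = await main("aptStream.yml");` resolves once the MongoDB
// connection callback fires, so the collections below are ready to use.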

export class packageManeger extends aptStreamConfig {
  #client: mongoDB.MongoClient;
  #collection: mongoDB.Collection<dbStorage>;
  #authCollection: mongoDB.Collection<userAuth>;
  async close() {this.#client.close()}
  constructor(initConfig: string|configJSON|aptStreamConfig, connectionCallback?: (err?: any) => void) {
    connectionCallback ||= (err) => {if(err) process.emit("warning", err);}
    super(initConfig);
    (async () => {
      const database = this.getDatabase();
      const mongoClient = this.#client = await (new mongoDB.MongoClient(database.url)).connect();
      mongoClient.on("error", err => console.error(err));
      this.#authCollection = mongoClient.db(database.databaseName || "aptStream").collection<userAuth>("auth");
      this.#collection = mongoClient.db(database.databaseName || "aptStream").collection<dbStorage>("packages");
    })().then(() => connectionCallback(), err => connectionCallback(err));
  }

  getClientInfo() {
    const connection = this.#client["topology"];
    return {
      connections: {
        max: Number(connection.s.options.maxConnecting),
        current: Number(connection.client.s.activeSessions?.size),
      }
    }
  }

  async createToken(username: string) {
    let token: string;
    while (true) {
      token = crypto.randomBytes(8).toString("hex");
      if (!(await this.#authCollection.findOne({token}))) break;
    }
    if (!(await this.#authCollection.findOne({username}))) await this.#authCollection.insertOne({username, createAt: new Date(), token: []});
    // Append the new token to the user's token array.
    await this.#authCollection.findOneAndUpdate({username}, {$push: {token: token}});
    return token;
  }

  async userAs(token: string) {
    // Matching a scalar against the array field checks membership.
    return !!(await this.#authCollection.findOne({token: String(token)}));
  }

  async pkgQuery(query: mongoDB.Filter<dbStorage>) {
    return this.#collection.find(query).toArray();
  }

  async packagesCount() {
    return (await this.#collection.stats()).count;
  }

  async getPackagesHash() {
    return this.#collection.distinct("controlFile.MD5sum");
  }

  async repoInfo(repositoryName: string) {
    const repositorys = this.getRepository(repositoryName).getAllRepositorys();
    if (!repositorys.length) throw new Error("Repository or component name does not exist!");
    return {
      packagesCount: (await Promise.all(repositorys.map(async ({repositoryID}) => this.#collection.countDocuments({repositoryID})))).reduce((acc, count) => acc+count, 0),
      sources: repositorys.length,
    };
  }
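
  // createPackage streams the Debian "Packages" index for one component/arch:
  // with a callback it pipes the (optionally gzip/xz) compressed text to the
  // caller, otherwise it only computes the sizes and hashes needed by Release.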
  async createPackage(repositoryName: string, componentName: string, Arch: string, appRoot: string = "", options?: {compress?: "gz"|"xz", callback: (str: stream.Readable) => void}): Promise<{filePath: string; fileSize: number; sha512: string; sha256: string; sha1: string; md5: string;}[]> {
    const repositorys = this.getRepository(repositoryName).getAllRepositorys().filter(pkg => pkg.componentName === componentName);
    if (!repositorys.length) throw new Error("Repository or component name does not exist!");

    const str = new stream.Readable({autoDestroy: true, emitClose: true, read(_s){}});
    const gg: (Promise<{filePath: string; fileSize: number; sha512: string; sha256: string; sha1: string; md5: string;}>)[] = [];
    if (typeof options?.callback === "function") (async () => options.callback(str.pipe(compressStream(options.compress === "gz" ? "gzip" : options.compress === "xz" ? "xz" : "passThrough"))))().catch(err => str.emit("error", err));
    else {
      async function getHash(compress?: "gz"|"xz") {
        const com = stream.Readable.from(str.pipe(compressStream(compress === "gz" ? "gzip" : compress === "xz" ? "xz" : "passThrough")));
        return extendsCrypto.createHashAsync(com).then(({hash, byteLength}) => ({
          filePath: path.posix.join(componentName, "binary-"+Arch, "Packages"+(compress === "gz" ? ".gz" : compress === "xz" ? ".xz" : "")),
          fileSize: byteLength,
          sha512: hash.sha512,
          sha256: hash.sha256,
          sha1: hash.sha1,
          md5: hash.md5,
        }));
      }
      gg.push(getHash());
      if (this.getCompressRelease("gzip")) gg.push(getHash("gz"));
      if (this.getCompressRelease("xz")) gg.push(getHash("xz"));
    }
    (async () => {
      let breakLine = false;
      for (const repo of repositorys) {
        // Page through the collection 2500 documents at a time.
        let pkgs: mongoDB.WithId<dbStorage>[], page = 0;
        while ((pkgs = await this.#collection.find({repositoryID: repo.repositoryID, "controlFile.Architecture": Arch}).skip(page).limit(2500).toArray()).length > 0) {
          page += pkgs.length;
          for (const {controlFile: pkg} of pkgs) {
            let pkgHash: string;
            if (!(pkgHash = pkg.MD5sum)) continue;
            if (breakLine) str.push("\n\n"); else breakLine = true;
            str.push(dpkg.createControl({
              ...pkg,
              // Packages are served from the pool by their MD5 hash.
              Filename: path.posix.join("/", appRoot, "pool", `${pkgHash}.deb`).slice(1),
            }));
          }
        }
      }
      str.push(null);
    })().catch(err => str.emit("error", err));
    return Promise.all(gg);
  }
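
  // createRelease assembles the APT "Release" document for a distribution: the
  // architectures, components, and MD5/SHA1/SHA256/SHA512 hashes of the Packages
  // indexes above, with optional GPG signing for Release.gpg/InRelease.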
  async createRelease(repositoryName: string, appRoot: string) {
    const source = this.getRepository(repositoryName);
    const repositorys = source.getAllRepositorys();
    const releaseDate = (new Date()).toUTCString();
    const Architectures = await this.#collection.distinct("controlFile.Architecture", {repositoryID: {$in: repositorys.map(a => a.repositoryID)}});
    const Components = Array.from(new Set(repositorys.map(rpm => rpm.componentName)));
    const MD5Sum = new Set<{hash: string, size: number, path: string}>();
    const SHA1 = new Set<{hash: string, size: number, path: string}>();
    const SHA256 = new Set<{hash: string, size: number, path: string}>();
    const SHA512 = new Set<{hash: string, size: number, path: string}>();
    await Promise.all(Architectures.map(async arch => Promise.all(Components.map(async comp => this.createPackage(repositoryName, comp, arch, appRoot).then(res => res.forEach(({fileSize, filePath, md5, sha1, sha256, sha512}) => {
      MD5Sum.add({size: fileSize, path: filePath, hash: md5});
      SHA1.add({size: fileSize, path: filePath, hash: sha1});
      SHA256.add({size: fileSize, path: filePath, hash: sha256});
      SHA512.add({size: fileSize, path: filePath, hash: sha512});
    }), err => console.log(err))))));
    const toJSON = () => {
      if ((!Architectures.length) && (!Components.length)) throw new Error("Invalid repository config, or nothing has been loaded into the database!");
      const data = {
        Date: releaseDate,
        acquireByHash: false,
        Codename: source.getCodename(),
        Suite: source.getSuite(),
        Origin: source.getOrigin(),
        Label: source.getLabel(),
        Description: source.getDescription(),
        Architectures,
        Components,
        MD5Sum: Array.from(MD5Sum.values()).sort((a, b) => b.size - a.size),
        SHA1: Array.from(SHA1.values()).sort((a, b) => b.size - a.size),
        SHA256: Array.from(SHA256.values()).sort((a, b) => b.size - a.size),
        SHA512: Array.from(SHA512.values()).sort((a, b) => b.size - a.size),
      };
      if (!data.Architectures.length) throw new Error("At least one package must be loaded into the database!");
      return data;
    }

    const toString = () => {
      const reljson = toJSON();
      let configString: string[] = [
        "Date: "+(reljson.Date),
        "Acquire-By-Hash: no",
        "Architectures: "+(reljson.Architectures.join(" ")),
        "Components: "+(reljson.Components.join(" ")),
      ];

      if (reljson.Codename) configString.push(`Codename: ${reljson.Codename}`);
      if (reljson.Suite) configString.push(`Suite: ${reljson.Suite}`);
      if (reljson.Origin) configString.push(`Origin: ${reljson.Origin}`);
      if (reljson.Label) configString.push(`Label: ${reljson.Label}`);
      if (reljson.Description) configString.push(`Description: ${reljson.Description}`);

      // Emit one hash section per algorithm, right-aligning the size column.
      const insertHash = (name: string, hashes: typeof reljson.MD5Sum) => {
        configString.push(name+":");
        const sizeLength = hashes.at(0).size.toString().length+2;
        for (const data of hashes) configString.push((" "+data.hash + " "+(Array(Math.max(1, Math.abs(sizeLength - (data.size.toString().length)))).fill("").join(" ")+(data.size.toString()))+" "+data.path))
      }
      if (reljson.MD5Sum.length > 0) insertHash("MD5Sum", reljson.MD5Sum);
      if (reljson.SHA1.length > 0) insertHash("SHA1", reljson.SHA1);
      if (reljson.SHA256.length > 0) insertHash("SHA256", reljson.SHA256);
      if (reljson.SHA512.length > 0) insertHash("SHA512", reljson.SHA512);

      return configString.join("\n");
    }

    const inRelease = async (type: "sign"|"clearMessage" = "sign"): Promise<string> => {
      if (!(source.getCodename()||source.getSuite())) throw new Error("A Suite or Codename is required to create the InRelease file");
      else if (!(MD5Sum.size||SHA256.size)) throw new Error("MD5 or SHA256 hashes are required to create the InRelease file");
      const gpgSign = this.getPGPKey();
      const privateKey = gpgSign.gpgPassphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: gpgSign.privateKey.keyContent }), passphrase: gpgSign.gpgPassphrase}) : await openpgp.readPrivateKey({ armoredKey: gpgSign.privateKey.keyContent });
      const text = toString();
      if (type === "clearMessage") return Buffer.from(await openpgp.sign({
        signingKeys: privateKey,
        format: "armored",
        message: await openpgp.createMessage({text})
      }) as any).toString("utf8");
      return openpgp.sign({
        signingKeys: privateKey,
        format: "armored",
        message: await openpgp.createCleartextMessage({text})
      });
    }
    return {
      toJSON,
      toString,
      inRelease
    }
  }

  async getPackageStream(packageTarget: dbStorage) {
    const source = this.getRepository(packageTarget.repositoryID).get(packageTarget.repositoryID);
    if (!source) throw new Error("The package source is no longer available, please sync packages!");
    let saveCache: string;
    if (await this.getDataStorage()) {
      const cacheFolder = path.join(await this.getDataStorage(), "deb_cache");
      if (!(await extendsFS.exists(cacheFolder))) await fs.mkdir(cacheFolder, {recursive: true});
      const { MD5sum, SHA1, SHA256, SHA512 } = packageTarget.controlFile;
      // Serve from the local cache when any hash-named file already exists.
      for (const hash of ([MD5sum, SHA1, SHA256, SHA512])) {
        if (!hash) continue;
        const filePath = path.join(cacheFolder, `${hash}.deb`);
        if (await extendsFS.exists(filePath)) return oldFs.createReadStream(filePath);
        else if (!saveCache) saveCache = filePath;
      }
    }

    if (source.type === "http") {
      const { url, auth } = source, { header: headers, query } = auth || {};
      return coreHTTP.streamRequest(url, {headers, query}).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "mirror") {
      const { debUrl } = packageTarget.restoreFile;
      return coreHTTP.streamRequest(debUrl).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "github") {
      const { token } = source, { url } = packageTarget.restoreFile;
      return coreHTTP.streamRequest(url, {headers: token ? {"Authorization": "token "+token} : {}}).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "oracleBucket") {
      const { authConfig } = source, { restoreFile: { path } } = packageTarget;
      const bucket = await oracleBucket.oracleBucket(authConfig);
      return bucket.getFileStream(path).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "googleDriver") {
      const { clientId, clientSecret, clientToken } = source, { restoreFile: { id } } = packageTarget;
      const gdrive = await googleDriver.GoogleDriver({authConfig: {clientID: clientId, clientSecret, token: clientToken, redirectURL: "http://localhost", authUrlCallback(){throw new Error("Set up first")}, tokenCallback() {}}});
      return gdrive.getFileStream(id).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "docker") {
      const { image, auth } = source, { ref, path: debPath } = packageTarget.restoreFile;
      const registry = new dockerRegistry.v2(image, auth);
      return new Promise<stream.Readable>((done, reject) => registry.extractLayer(ref).then(tar => tar.on("error", reject).on("File", entry => entry.path === debPath ? done(entry.stream) : null))).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    }
    throw new Error("Check the package source type");
  }

  async addPackage(repositoryID: string, control: dpkg.debianControl, restore: any): Promise<dbStorage> {
    if (Boolean(await this.#collection.findOne({
      repositoryID,
      "controlFile.Package": control.Package,
      "controlFile.Version": control.Version,
      "controlFile.Architecture": control.Architecture
    }))) {
      const { Package, Architecture, Version } = control;
      throw new Error(format("%s -> %s/%s (%s) already exists in the database", repositoryID, Package, Architecture, Version));
    }
    await this.#collection.insertOne({
      repositoryID,
      restoreFile: restore,
      controlFile: control
    });
    return {
      repositoryID,
      restoreFile: restore,
      controlFile: control
    };
  }

  async syncRepositorys(callback?: (error?: any, dbStr?: dbStorage) => void) {
    const sources = this.getRepositorys().map(({repositoryManeger}) => repositoryManeger.getAllRepositorys()).flat(2);
    const toDelete = (await this.#collection.distinct("repositoryID")).filter(key => !sources.find(d => d.repositoryID === key));
    // Drop packages whose repository was removed from the config.
    if (toDelete.length > 0) await this.#collection.deleteMany({repositoryID: {$in: toDelete}});
    for (const repo of sources) await this.registerSource(repo.repositoryID, repo, callback);
    return toDelete;
  }

  async registerSource(repositoryID: string, target: repositorySource, callback?: (error?: any, dbStr?: dbStorage) => void) {
    callback ??= (_void1, _void2) => {};
    if (target.type === "http") {
      try {
        const control = (await dpkg.parsePackage(await coreHTTP.streamRequest(target.url, {headers: target.auth?.header, query: target.auth?.query}))).controlFile;
        callback(null, await this.addPackage(repositoryID, control, {}));
      } catch (err) {
        callback(err, null);
      }
    } else if (target.type === "oracleBucket") {
      const { authConfig, path = [] } = target;
      const bucket = await oracleBucket.oracleBucket(authConfig);
      try {
        // With no explicit paths, index every .deb object in the bucket.
        if (path.length === 0) path.push(...((await bucket.listFiles()).filter(k => k.name.endsWith(".deb")).map(({name}) => name)));
        for (const file of path) {
          const control = (await dpkg.parsePackage(await bucket.getFileStream(file))).controlFile;
          callback(null, await this.addPackage(repositoryID, control, {path: file}));
        }
      } catch (err) {
        callback(err, null);
      }
    } else if (target.type === "googleDriver") {
      const { clientId, clientSecret, clientToken, gIDs = [] } = target;
      const gdrive = await googleDriver.GoogleDriver({authConfig: {clientID: clientId, clientSecret, token: clientToken, redirectURL: "http://localhost", authUrlCallback(){throw new Error("Set up first")}, tokenCallback() {}}});
      if (gIDs.length === 0) gIDs.push(...((await gdrive.listFiles()).filter(rel => rel.name.endsWith(".deb")).map(({id}) => id)));
      for (const file of gIDs) {
        try {
          const control = (await dpkg.parsePackage(await gdrive.getFileStream(file))).controlFile;
          callback(null, await this.addPackage(repositoryID, control, {id: file}));
        } catch (err) {
          callback(err, null);
        }
      }
    } else if (target.type === "github") {
      const { owner, repository, token } = target;
      const gh = await Github.repositoryManeger(owner, repository, {token});
      if (target.subType === "branch") {
        const { branch = (await gh.repository.listBranchs()).at(0)?.name ?? "main" } = target;
        for (const { path: filePath } of (await gh.git.getTree(branch)).tree.filter(file => file.type === "tree" ? false : (file.size > 10) && file.path.endsWith(".deb"))) {
          try {
            const rawURL = new URL(path.posix.join(owner, repository, branch, filePath), "https://raw.githubusercontent.com");
            const control = (await dpkg.parsePackage(gh.git.getRawFile(branch, filePath))).controlFile;
            callback(null, await this.addPackage(repositoryID, control, {url: rawURL.toString()}));
          } catch (err) {
            callback(err, null);
          }
        }
      } else {
        const { tag = [] } = target;
        if (!tag.length) tag.push(...((await gh.release.getRelease()).map(d => d.tag_name)));
        for (const tagName of tag) {
          try {
            const assets = (await gh.release.getRelease(tagName)).assets.filter(({name}) => name.endsWith(".deb"));
            for (const asset of assets) {
              const control = (await dpkg.parsePackage(await coreHTTP.streamRequest(asset.browser_download_url, {headers: token ? {Authorization: `token ${token}`} : {}}))).controlFile;
              callback(null, await this.addPackage(repositoryID, control, {url: asset.browser_download_url}));
            }
          } catch (err) {
            callback(err, null);
          }
        }
      }
} else if (target.type === "docker") {
|
||||
const { image, auth, tags = [] } = target;
|
||||
const registry = new dockerRegistry.v2(image, auth);
|
||||
if (tags.length === 0) {
|
||||
const { sha256, tag } = registry.image;
|
||||
if (sha256) tags.push(sha256);
|
||||
else if (tag) tags.push(tag);
|
||||
else tags.push(...((await registry.getTags()).reverse().slice(0, 6)));
|
||||
}
|
||||
for (const tag of tags) {
|
||||
const manifestManeger = new dockerRegistry.Utils.Manifest(await registry.getManifets(tag), registry);
|
||||
const addPckage = async () => {
|
||||
for (const layer of manifestManeger.getLayers()) {
|
||||
const blob = await registry.extractLayer(layer.digest);
|
||||
blob.on("error", err => callback(err, null)).on("entry", async (entry, str, next) => {
|
||||
next();
|
||||
if (!(entry.name.endsWith(".deb"))) return null;
|
||||
try {
|
||||
const control = (await dpkg.parsePackage(stream.Readable.from(str))).controlFile;
|
||||
callback(null, await this.addPackage(repositoryID, control, {ref: layer.digest, path: entry.path}));
|
||||
} catch (err) {callback(err, null);}
|
||||
});
|
||||
await finished(blob);
|
||||
}
|
||||
}
|
||||
if (manifestManeger.multiArch) {
|
||||
for (const platform of manifestManeger.platforms) {
|
||||
await manifestManeger.setPlatform(platform as any);
|
||||
await addPckage();
|
||||
}
|
||||
} else await addPckage();
|
||||
}
|
||||
} else if (target.type === "mirror") {
|
||||
const { config = [] } = target;
|
||||
const readFile = (path: string, start: number, end: number) => new Promise<Buffer>((done, reject) => {
|
||||
let buf: Buffer[] = [];
|
||||
oldFs.createReadStream(path, { start, end }).on("error", reject).on("data", (data: Buffer) => buf.push(data)).on("close", () => {done(Buffer.concat(buf)); buf = null;});
|
||||
});
|
||||
for (const aptSrc of config.filter(d => d.type === "packages")) {
|
||||
const main_url = new URL(aptSrc.src);
|
||||
const distMain = new URL(path.posix.join(main_url.pathname, "dists", aptSrc.distname), main_url);
|
||||
const release = apt.parseRelease(await coreHTTP.bufferRequestBody(distMain.toString()+"/InRelease").then(async data => (await openpgp.readCleartextMessage({cleartextMessage: data.toString()})).getText()).catch(() => coreHTTP.bufferRequestBody(distMain.toString()+"/Release").then(data => data.toString())));
|
||||
for (const Component of release.Components) for (const Arch of release.Architectures.filter(arch => arch !== "all")) {
|
||||
for (const ext of (["", ".gz", ".xz"])) {
|
||||
const mainReq = new URL(path.posix.join(distMain.pathname, Component, `binary-${Arch}`, `Packages${ext}`), distMain);
|
||||
const tmpFile = (path.join(tmpdir(), Buffer.from(mainReq.toString(), "utf8").toString("hex")))+".package";
|
||||
try {
|
||||
await streamPromise.finished((await coreHTTP.streamRequest(mainReq)).pipe(decompressStream()).pipe(oldFs.createWriteStream(tmpFile)));
|
||||
const packagesLocation: {start: number, end: number}[] = [];
|
||||
let start: number = 0, currentChuck = 0;
|
||||
await streamPromise.finished(oldFs.createReadStream(tmpFile).on("data", (chunk: Buffer) => {
|
||||
for (let i = 0; i < chunk.length; i++) if ((chunk[i - 1] === 0x0A) && (chunk[i] === 0x0A)) {
|
||||
packagesLocation.push({
|
||||
start,
|
||||
end: i + currentChuck,
|
||||
});
|
||||
start = (i + currentChuck)+1;
|
||||
}
|
||||
currentChuck += Buffer.byteLength(chunk, "binary");
|
||||
}));
|
||||
for (const { start, end } of packagesLocation) {
|
||||
const control = dpkg.parseControl(await readFile(tmpFile, start, end));
|
||||
callback(null, await this.addPackage(repositoryID, control, {
|
||||
debUrl: (new URL(path.posix.join(main_url.pathname, control.Filename), main_url)).toString()
|
||||
}));
|
||||
}
|
||||
await fs.rm(tmpFile);
|
||||
break;
|
||||
} catch (err) {
|
||||
callback(err, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
src/patchExpress.ts (new file)
@@ -0,0 +1,13 @@
/**
 * Patch Express 4.x route handlers so rejected promises are forwarded to the
 * error-handling middleware instead of crashing the server.
 */
import expressLayer from "express/lib/router/layer.js";
expressLayer.prototype.handle_request = async function handle_request_promised(...args) {
  var fn = this.handle;
  if (fn.length > 3) return args.at(-1)();
  try {
    // Preserve the layer as `this`, matching Express's own invocation.
    await fn.call(this, ...args);
  } catch (err) {
    args.at(-1)(err);
  }
}
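
// With this patch an async route can simply throw and Express routes the error
// to the error middleware, e.g.:
//   app.get("/boom", async () => { throw new Error("handled, not a crash"); });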
src/server.ts (new file)
@@ -0,0 +1,164 @@
import { extendsFS } from "@sirherobrine23/extends";
import express from "express";
import cluster from "node:cluster";
import crypto from "node:crypto";
import { createWriteStream } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import stream from "node:stream";
import { finished } from "node:stream/promises";
import { parse } from "node:url";
import { Connection } from "./config.js";
import "./patchExpress.js";
import { http } from "@sirherobrine23/http";
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";

// Caps a piped body at `max` bytes, ending the readable side once the limit is
// reached; a negative or NaN limit passes everything through untouched.
const Range = (max: number) => {
  if (max < 0 || isNaN(max)) return new stream.PassThrough();
  let written = 0;
  return new stream.Transform({
    transform(chunk, encoding, callback) {
      if (!(Buffer.isBuffer(chunk))) chunk = Buffer.from(chunk, encoding);
      const remain = max - written;
      if (remain > 0) this.push(chunk.subarray(0, remain));
      written += chunk.length;
      if (written >= max) this.push(null);
      callback();
    },
  });
}
|
||||
|
||||
export async function createRoute(configManeger: Connection) {
|
||||
const app = express();
|
||||
app.disable("x-powered-by").disable("etag");
|
||||
app.use(express.json(), (_req, res, next) => {
|
||||
res.setHeader("cluster-id", String(cluster.isPrimary ? 1 : cluster.worker.id));
|
||||
res.json = (body) => res.setHeader("Content-Type", "application/json").send(JSON.stringify(body, null, 2)); return next();
|
||||
});
|
||||
|
||||
// Public key
|
||||
app.get("/public(_key|)(|.gpg|.dearmor)", async (req, res) => {
|
||||
if (!configManeger.repoConfig.publicGPG) return res.status(400).json({ error: "GPG Key is disabled" });
|
||||
return res.setHeader("Content-Type", req.path.endsWith(".dearmor") ? "octect/stream" : "text/plain").send(await configManeger.repoConfig.getPulicKey(req.path.endsWith(".dearmor") ? "dearmor" : "armor"));
|
||||
});
|
||||
|
||||
app.get("/dists/(:distName)(|/InRelease|/Release(.gpg)?)?", (req, res) => {
|
||||
const { distName } = req.params;
|
||||
if (!(configManeger.repoConfig.has(distName))) return res.status(404).json({ error: "Ditribuition not exist" });
|
||||
|
||||
return res.json({ distName });
|
||||
});

  app.get("/dists/:distName/:componentName/binary-:Arch/Packages(.(gz|xz))?", (req, res) => {
    const { distName, componentName, Arch } = req.params;
    const compression = req.path.endsWith(".gz") ? "gzip" : req.path.endsWith(".xz") ? "lzma" : "none";
    if (!(configManeger.repoConfig.has(distName))) return res.status(404).json({ error: "Distribution does not exist" });
    const sources = configManeger.repoConfig.get(distName).toArray().filter(info => info.componentName === componentName);
    if (!sources.length) return res.status(404).json({ error: "This component does not exist" });

    return res.json({
      distName,
      componentName,
      Arch,
      compression
    });
  });

  app.post("/pool/upload", async (req, res) => {
    const { repository, destID } = (req.body || {});
    if (!(configManeger.repoConfig.has(repository))) return res.status(400).json({ error: "Provide a valid repository name" });
    else if (!(configManeger.repoConfig.get(repository).has(destID))) return res.status(400).json({ error: "Provide a valid source ID" });
    else if (!(configManeger.repoConfig.get(repository).get(destID).enableUpload)) return res.status(401).json({ error: "This source has upload disabled or does not support it" });

    const ID = crypto.randomUUID(), token = ([
      crypto.randomBytes(4).toString("hex"),
      crypto.randomBytes(crypto.randomInt(4, 16)).toString("hex"),
      crypto.randomBytes(crypto.randomInt(2, 8)).toString("hex"),
    ]).join("-");
    let filePath: string;
    while (true) {
      filePath = path.join(configManeger.repoConfig.tmpFolder, crypto.randomBytes(crypto.randomInt(8, 16)).toString("hex"));
      if (!(await extendsFS.exists(filePath))) break;
    }
    await fs.writeFile(filePath, ""); // Touch the staging file

    await configManeger.uploadCollection.insertOne({
      ID,
      repository,
      destID,
      validAt: Date.now() + 1000 * 60 * 60 * 30, // Session expires in 30 hours
      token,
      filePath,
    });

    return res.setHeader("Location", path.posix.join(parse(req.url).pathname, ID)).status(201).json({
      token,
      ID
    });
  });

  /**
   * PUT data into the staged file before it is sent to its destination.
   *
   * To append data, set `Content-Range` and `Content-Type: application/octet-stream`.
   * To finalize the upload, repeat the request without these headers.
   */
  app.put("/pool/upload/:sessionID", async (req, res) => {
    const { sessionID } = req.params;
    const info = await configManeger.uploadCollection.findOne({ ID: sessionID });
    const isPut = (req.headers["content-type"] || "").startsWith("application/octet-stream");
    if (!info) return res.status(400).json({ error: "Require upload ID" });
    else if ((req.headers.authorization || "").slice(5).trim() !== info.token) return res.status(400).json({ error: "Invalid token" });
    else if (isPut && !(req.headers["content-range"])) return res.status(400).json({ error: "Set Content-Range to PUT file data" });

    if (isPut) {
      if (req.headers["content-range"].startsWith("bytes ")) req.headers["content-range"] = req.headers["content-range"].slice(5).trim();
      if (req.headers["content-range"].trim() === "*") req.headers["content-range"] = "0";

      const [start, _end] = req.headers["content-range"].split("-"), [end] = (_end || "").split("/");
      if (Number(end) < Number(start)) return res.status(400).json({ error: "Range end must not be less than start " + start });
      // Inclusive byte range -> number of bytes to accept; -1 (no cap) when no end was sent.
      const limit = !_end ? -1 : Number(end) - Number(start) + 1;
      await finished(req.pipe(Range(limit)).pipe(createWriteStream(info.filePath, { start: Number(start) })));
      // Renew the session expiry window (30 hours).
      await configManeger.uploadCollection.findOneAndUpdate({ ID: sessionID }, { $set: { validAt: Date.now() + 1000 * 60 * 60 * 30 } });
      return res.status(202).end();
    }

    const upload = await configManeger.repoConfig.get(info.repository).uploadFile(info.destID, info.filePath);
    await fs.rm(info.filePath, { force: true });
    await configManeger.uploadCollection.findOneAndDelete({ ID: info.ID });
    return res.setHeader("Location", `/pool/${upload.controlFile.MD5sum}.deb`).status(201).end();
  });
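A hedged end-to-end sketch of this upload session from the client side (host, repository name, source ID, and file name are assumptions, not taken from this PR; header shapes follow the handlers above):

import fs from "node:fs/promises";

// 1. Open an upload session: responds 201 with { ID, token } and a Location header.
const open = await fetch("http://localhost:3000/pool/upload", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ repository: "main", destID: "my-source" }),
});
const { ID, token } = await open.json() as { ID: string, token: string };

// 2. PUT the package bytes. The handler reads Authorization with slice(5),
//    so the "token <token>" shape fits.
const data = await fs.readFile("./example_1.0.0_amd64.deb");
await fetch(`http://localhost:3000/pool/upload/${ID}`, {
  method: "PUT",
  headers: {
    "Authorization": `token ${token}`,
    "Content-Type": "application/octet-stream",
    "Content-Range": `bytes 0-${data.byteLength - 1}/${data.byteLength}`,
  },
  body: data,
});

// 3. Finalize: the same PUT without the octet-stream headers pushes the
//    staged file to its destination and returns the pool Location.
await fetch(`http://localhost:3000/pool/upload/${ID}`, {
  method: "PUT",
  headers: { "Authorization": `token ${token}` },
});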

  app.get("/pool/(:packageHASH)(|.deb)?", async (req, res, next) => {
    const download = req.path.endsWith(".deb"), { packageHASH } = req.params;
    const info = await configManeger.packageCollection.findOne({ $or: [{ "control.MD5sum": packageHASH }, { "control.SHA1": packageHASH }, { "control.SHA256": packageHASH }, { "control.SHA512": packageHASH }] });
    if (!info) return res.status(404).json({ error: "Package not registered" });
    else if (!download) return res.json(info.control);
    const origin = info.repositorys.find(rel => configManeger.repoConfig.has(rel.repository) && configManeger.repoConfig.get(rel.repository).has(rel.origim));
    if (!origin) return res.status(400).json({ error: "Cannot resolve origin source" });
    const src = configManeger.repoConfig.get(origin.repository).get(origin.origim);
    if (src.type === "http") {
      return http.streamRequest(src.url, {
        query: src.query,
        headers: src.header
      }).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "github") {
      const download: { url: string } = info.restoreFile;
      return http.streamRequest(download.url, {
        headers: src.token ? { Authorization: `token ${src.token}` } : {},
        query: { token: src.token },
      }).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "mirror") {
      const download: { url: string } = info.restoreFile;
      return http.streamRequest(download.url).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "oracleBucket") {
      const download: { filePath: string } = info.restoreFile;
      const oci = await oracleBucket.oracleBucket(src.authConfig);
      return oci.getFileStream(download.filePath).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "googleDriver") {
      const download: { id: string } = info.restoreFile;
      const gdrive = await googleDriver.GoogleDriver({ oauth: await googleDriver.createAuth({ clientID: src.clientId, clientSecret: src.clientSecret, token: src.clientToken, authUrlCallback: () => { throw new Error("Auth disabled"); }, tokenCallback: () => { }, redirectURL: null }) });
      return gdrive.getFileStream(download.id).then(src => src.pipe(res)).catch(next);
    } else if (src.type === "docker") {
      throw new Error("Docker source is not implemented yet");
    } else return res.status(404).json({ error: "Source origin is unknown" });
  });
  return app;
}
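A hedged bootstrap sketch (the Connection construction is hypothetical; this PR does not show how the config layer is instantiated):

import { Connection } from "./config.js";
import { createRoute } from "./server.js";

// Assumption: some factory in config.js yields a ready Connection.
declare const connection: Connection;

const app = await createRoute(connection);
app.listen(3000, () => console.log("apt repository listening on :3000"));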
tsconfig.json
@ -13,7 +13,6 @@
    "noFallthroughCasesInSwitch": true,
    "skipLibCheck": true,
    "allowJs": true,
    "composite": true,
    "lib": [
      "ESNext",
      "ES7"