WIP: Migrate dpkg package #36
@ -1,16 +0,0 @@
|
||||
FROM ghcr.io/sirherobrine23/mydockerimage:latest
|
||||
|
||||
# Add non root user and Install oh my zsh
|
||||
ARG USERNAME="devcontainer"
|
||||
ARG USER_UID="1000"
|
||||
ARG USER_GID=$USER_UID
|
||||
RUN groupadd --gid $USER_GID $USERNAME && adduser --disabled-password --gecos "" --shell /usr/bin/zsh --uid $USER_UID --gid $USER_GID $USERNAME && usermod -aG sudo $USERNAME && echo "$USERNAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers.d/$USERNAME && chmod 0440 /etc/sudoers.d/$USERNAME && usermod -aG docker $USERNAME
|
||||
USER $USERNAME
|
||||
WORKDIR /home/$USERNAME
|
||||
# Set default entrypoint
|
||||
ENTRYPOINT [ "/usr/local/bin/start.sh" ]
|
||||
RUN yes | sh -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)" && \
|
||||
git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ~/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting && \
|
||||
git clone https://github.com/zsh-users/zsh-autosuggestions ~/.oh-my-zsh/custom/plugins/zsh-autosuggestions && \
|
||||
sed -e 's|ZSH_THEME=".*"|ZSH_THEME="strug"|g' -i ~/.zshrc && \
|
||||
sed -e 's|plugins=(.*)|plugins=(git docker kubectl zsh-syntax-highlighting zsh-autosuggestions)|g' -i ~/.zshrc
|
@ -1,60 +0,0 @@
|
||||
{
|
||||
"name": "Typescript base",
|
||||
"updateRemoteUserUID": false,
|
||||
"containerUser": "develop",
|
||||
"remoteUser": "develop",
|
||||
"overrideCommand": false,
|
||||
"postCreateCommand": "npm ci",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"args": {
|
||||
"USERNAME": "develop",
|
||||
"USER_UID": "1000"
|
||||
}
|
||||
},
|
||||
"runArgs": [
|
||||
"--init",
|
||||
"--privileged"
|
||||
],
|
||||
"mounts": [
|
||||
"target=/var/lib/docker,source=typescript_base,type=volume",
|
||||
"target=/lib/modules/,source=/lib/modules/,type=bind,readonly"
|
||||
],
|
||||
"settings": {
|
||||
"editor.tabSize": 2,
|
||||
"editor.minimap.enabled": false,
|
||||
"files.eol": "\n",
|
||||
"files.trimFinalNewlines": true,
|
||||
"files.trimTrailingWhitespace": true,
|
||||
"googleTranslateExt.replaceText": true,
|
||||
"material-icon-theme.showWelcomeMessage": false,
|
||||
"gitlens.showWelcomeOnInstall": false,
|
||||
"googleTranslateExt.languages": [
|
||||
"en"
|
||||
]
|
||||
},
|
||||
"extensions": [
|
||||
"redhat.vscode-yaml",
|
||||
"hookyqr.beautify",
|
||||
"aaron-bond.better-comments",
|
||||
"wmaurer.change-case",
|
||||
"oouo-diogo-perdigao.docthis",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"me-dutour-mathieu.vscode-github-actions",
|
||||
"benshabatnoam.google-translate-ext",
|
||||
"oderwat.indent-rainbow",
|
||||
"tgreen7.vs-code-node-require",
|
||||
"eg2.vscode-npm-script",
|
||||
"christian-kohler.npm-intellisense",
|
||||
"ionutvmi.path-autocomplete",
|
||||
"christian-kohler.path-intellisense",
|
||||
"esbenp.prettier-vscode",
|
||||
"visualstudioexptteam.vscodeintellicode",
|
||||
"vscode-icons-team.vscode-icons",
|
||||
"eamodio.gitlens",
|
||||
"ms-azuretools.vscode-docker",
|
||||
"chrmarti.regex",
|
||||
"formulahendry.code-runner",
|
||||
"euskadi31.json-pretty-printer"
|
||||
]
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
/*.y*ml
|
||||
/example
|
||||
/*[Dd]ocker*
|
||||
node_modules/
|
||||
.devcontainer/
|
||||
.vscode/
|
||||
.github/
|
||||
*.d.ts
|
||||
*.js
|
5
.github/dependabot.yml
vendored
5
.github/dependabot.yml
vendored
@ -1,10 +1,5 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: github-actions
|
||||
directory: /
|
||||
schedule:
|
||||
interval: daily
|
||||
|
||||
- package-ecosystem: npm
|
||||
directory: /
|
||||
schedule:
|
||||
|
70
.github/workflows/publish.yaml
vendored
70
.github/workflows/publish.yaml
vendored
@ -1,70 +0,0 @@
|
||||
name: Publish package
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- prereleased
|
||||
- released
|
||||
|
||||
jobs:
|
||||
publishpackage:
|
||||
runs-on: ubuntu-latest
|
||||
name: Publish
|
||||
permissions:
|
||||
packages: write
|
||||
contents: write
|
||||
env:
|
||||
PACKAGE_VERSION: ${{ github.ref }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
name: Code checkout
|
||||
with:
|
||||
persist-credentials: true
|
||||
ref: main
|
||||
fetch-depth: 2
|
||||
submodules: true
|
||||
|
||||
- name: Setup QEMU to Docker
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Setup Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Login into registry Github Packages
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Install basic tools
|
||||
- uses: actions/setup-node@v3
|
||||
name: Setup node.js
|
||||
with:
|
||||
node-version: 18.x
|
||||
registry-url: https://registry.npmjs.org/
|
||||
|
||||
- name: Edit version and install depencies
|
||||
run: |
|
||||
sudo npm i -g semver
|
||||
VERSION="$(semver -c ${{ github.ref_name }})"
|
||||
echo "PACKAGE_VERSION=$VERSION" >> $GITHUB_ENV
|
||||
jq --arg ver $VERSION '.version = $ver' package.json > package2.json
|
||||
mv -fv package2.json package.json
|
||||
|
||||
# Install depencides and build
|
||||
npm install --no-save
|
||||
|
||||
# Publish npm
|
||||
- run: npm publish --access public --tag ${{ github.event.release.prerelease && 'next' || 'latest' }}
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
- name: Build image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
platforms: "linux/amd64,linux/arm64"
|
||||
context: ./
|
||||
push: true
|
||||
tags: |
|
||||
ghcr.io/sirherobrine23/apt-stream:latest
|
||||
ghcr.io/sirherobrine23/apt-stream:v${{ env.PACKAGE_VERSION }}
|
23
.gitignore
vendored
23
.gitignore
vendored
@ -1,21 +1,8 @@
|
||||
storage/
|
||||
/apt*.yml
|
||||
/apt*.yaml
|
||||
/apt*.json
|
||||
.repoTest/
|
||||
thunder-tests
|
||||
*.deb
|
||||
*.key
|
||||
*.pem
|
||||
*.gpg
|
||||
/client_secret*.json
|
||||
|
||||
# Ingore node folders
|
||||
node_modules/
|
||||
# Node
|
||||
/*-lock*
|
||||
*.tgz
|
||||
node_modules/
|
||||
|
||||
# Typescript
|
||||
tsconfig.tsbuildinfo
|
||||
*.d.ts
|
||||
*.js
|
||||
src/*/src/**/*.d.ts
|
||||
src/*/src/**/*.js
|
||||
*.tsbuildinfo
|
8
.hintrc
8
.hintrc
@ -1,8 +0,0 @@
|
||||
{
|
||||
"extends": [
|
||||
"development"
|
||||
],
|
||||
"hints": {
|
||||
"typescript-config/strict": "off"
|
||||
}
|
||||
}
|
35
.npmignore
35
.npmignore
@ -1,35 +0,0 @@
|
||||
# vscode
|
||||
.devcontainer/
|
||||
.vscode/
|
||||
.hintrc
|
||||
|
||||
# Typescript
|
||||
src/*.ts
|
||||
tsconfig.*
|
||||
|
||||
# node and npm
|
||||
node_modules/
|
||||
/*.tgz
|
||||
|
||||
# Github and Git
|
||||
.github/
|
||||
.git*
|
||||
|
||||
# Docker
|
||||
.dockerignore
|
||||
*docker-compose.yaml
|
||||
*docker-compose.yml
|
||||
*Dockerfile*
|
||||
*dockerfile*
|
||||
|
||||
# Project
|
||||
/apt*.yml
|
||||
/apt*.yaml
|
||||
/apt*.json
|
||||
/example
|
||||
.repoTest/
|
||||
thunder-tests
|
||||
*.deb
|
||||
*.key
|
||||
*.pem
|
||||
*.gpg
|
22
.vscode/launch.json
vendored
22
.vscode/launch.json
vendored
@ -1,22 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Lauch API",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"internalConsoleOptions": "openOnSessionStart",
|
||||
"skipFiles": ["<node_internals>/**", "node_modules/**"],
|
||||
"cwd": "${workspaceRoot}",
|
||||
"runtimeExecutable": "node",
|
||||
"args": ["src/index.js", "server", "--cluster=0"],
|
||||
"preLaunchTask": {
|
||||
"type": "npm",
|
||||
"script": "build"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
28
.vscode/settings.json
vendored
Normal file
28
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
{
|
||||
"files.eol": "\n",
|
||||
"files.encoding": "utf8",
|
||||
"files.defaultLanguage": "javascript",
|
||||
"files.trimFinalNewlines": true,
|
||||
"files.trimTrailingWhitespace": true,
|
||||
"editor.tabSize": 2,
|
||||
"editor.insertSpaces": true,
|
||||
"editor.minimap.enabled": false,
|
||||
"editor.detectIndentation": false,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
"files.exclude": {
|
||||
"**/node_modules/": true,
|
||||
"**/*.d.ts": true,
|
||||
"**/*.js": true
|
||||
},
|
||||
"terminal.integrated.env.windows": {
|
||||
"PATH": "${workspaceFolder}/node_modules/.bin;${env:PATH}"
|
||||
},
|
||||
"terminal.integrated.env.linux": {
|
||||
"PATH": "${workspaceFolder}/node_modules/.bin:${env:PATH}"
|
||||
},
|
||||
"terminal.integrated.env.osx": {
|
||||
"PATH": "${workspaceFolder}/node_modules/.bin:${env:PATH}"
|
||||
}
|
||||
}
|
@ -1,9 +0,0 @@
|
||||
FROM node
|
||||
RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y python3 python3-pip && rm -rf /var/lib/apt/*
|
||||
WORKDIR /app
|
||||
COPY package*.json ./
|
||||
RUN npm install
|
||||
COPY . .
|
||||
RUN npm run prepack
|
||||
RUN npm link
|
||||
ENTRYPOINT [ "apt-stream", "server", "--data", "/data" ]
|
339
LICENSE
339
LICENSE
@ -1,339 +0,0 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 2, June 1991
|
||||
|
||||
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
License is intended to guarantee your freedom to share and change free
|
||||
software--to make sure the software is free for all its users. This
|
||||
General Public License applies to most of the Free Software
|
||||
Foundation's software and to any other program whose authors commit to
|
||||
using it. (Some other Free Software Foundation software is covered by
|
||||
the GNU Lesser General Public License instead.) You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
this service if you wish), that you receive source code or can get it
|
||||
if you want it, that you can change the software or use pieces of it
|
||||
in new free programs; and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
anyone to deny you these rights or to ask you to surrender the rights.
|
||||
These restrictions translate to certain responsibilities for you if you
|
||||
distribute copies of the software, or if you modify it.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must give the recipients all the rights that
|
||||
you have. You must make sure that they, too, receive or can get the
|
||||
source code. And you must show them these terms so they know their
|
||||
rights.
|
||||
|
||||
We protect your rights with two steps: (1) copyright the software, and
|
||||
(2) offer you this license which gives you legal permission to copy,
|
||||
distribute and/or modify the software.
|
||||
|
||||
Also, for each author's protection and ours, we want to make certain
|
||||
that everyone understands that there is no warranty for this free
|
||||
software. If the software is modified by someone else and passed on, we
|
||||
want its recipients to know that what they have is not the original, so
|
||||
that any problems introduced by others will not reflect on the original
|
||||
authors' reputations.
|
||||
|
||||
Finally, any free program is threatened constantly by software
|
||||
patents. We wish to avoid the danger that redistributors of a free
|
||||
program will individually obtain patent licenses, in effect making the
|
||||
program proprietary. To prevent this, we have made it clear that any
|
||||
patent must be licensed for everyone's free use or not licensed at all.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License applies to any program or other work which contains
|
||||
a notice placed by the copyright holder saying it may be distributed
|
||||
under the terms of this General Public License. The "Program", below,
|
||||
refers to any such program or work, and a "work based on the Program"
|
||||
means either the Program or any derivative work under copyright law:
|
||||
that is to say, a work containing the Program or a portion of it,
|
||||
either verbatim or with modifications and/or translated into another
|
||||
language. (Hereinafter, translation is included without limitation in
|
||||
the term "modification".) Each licensee is addressed as "you".
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running the Program is not restricted, and the output from the Program
|
||||
is covered only if its contents constitute a work based on the
|
||||
Program (independent of having been made by running the Program).
|
||||
Whether that is true depends on what the Program does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Program's
|
||||
source code as you receive it, in any medium, provided that you
|
||||
conspicuously and appropriately publish on each copy an appropriate
|
||||
copyright notice and disclaimer of warranty; keep intact all the
|
||||
notices that refer to this License and to the absence of any warranty;
|
||||
and give any other recipients of the Program a copy of this License
|
||||
along with the Program.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy, and
|
||||
you may at your option offer warranty protection in exchange for a fee.
|
||||
|
||||
2. You may modify your copy or copies of the Program or any portion
|
||||
of it, thus forming a work based on the Program, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) You must cause the modified files to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
b) You must cause any work that you distribute or publish, that in
|
||||
whole or in part contains or is derived from the Program or any
|
||||
part thereof, to be licensed as a whole at no charge to all third
|
||||
parties under the terms of this License.
|
||||
|
||||
c) If the modified program normally reads commands interactively
|
||||
when run, you must cause it, when started running for such
|
||||
interactive use in the most ordinary way, to print or display an
|
||||
announcement including an appropriate copyright notice and a
|
||||
notice that there is no warranty (or else, saying that you provide
|
||||
a warranty) and that users may redistribute the program under
|
||||
these conditions, and telling the user how to view a copy of this
|
||||
License. (Exception: if the Program itself is interactive but
|
||||
does not normally print such an announcement, your work based on
|
||||
the Program is not required to print an announcement.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Program,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Program, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Program.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Program
|
||||
with the Program (or with a work based on the Program) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may copy and distribute the Program (or a work based on it,
|
||||
under Section 2) in object code or executable form under the terms of
|
||||
Sections 1 and 2 above provided that you also do one of the following:
|
||||
|
||||
a) Accompany it with the complete corresponding machine-readable
|
||||
source code, which must be distributed under the terms of Sections
|
||||
1 and 2 above on a medium customarily used for software interchange; or,
|
||||
|
||||
b) Accompany it with a written offer, valid for at least three
|
||||
years, to give any third party, for a charge no more than your
|
||||
cost of physically performing source distribution, a complete
|
||||
machine-readable copy of the corresponding source code, to be
|
||||
distributed under the terms of Sections 1 and 2 above on a medium
|
||||
customarily used for software interchange; or,
|
||||
|
||||
c) Accompany it with the information you received as to the offer
|
||||
to distribute corresponding source code. (This alternative is
|
||||
allowed only for noncommercial distribution and only if you
|
||||
received the program in object code or executable form with such
|
||||
an offer, in accord with Subsection b above.)
|
||||
|
||||
The source code for a work means the preferred form of the work for
|
||||
making modifications to it. For an executable work, complete source
|
||||
code means all the source code for all modules it contains, plus any
|
||||
associated interface definition files, plus the scripts used to
|
||||
control compilation and installation of the executable. However, as a
|
||||
special exception, the source code distributed need not include
|
||||
anything that is normally distributed (in either source or binary
|
||||
form) with the major components (compiler, kernel, and so on) of the
|
||||
operating system on which the executable runs, unless that component
|
||||
itself accompanies the executable.
|
||||
|
||||
If distribution of executable or object code is made by offering
|
||||
access to copy from a designated place, then offering equivalent
|
||||
access to copy the source code from the same place counts as
|
||||
distribution of the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
4. You may not copy, modify, sublicense, or distribute the Program
|
||||
except as expressly provided under this License. Any attempt
|
||||
otherwise to copy, modify, sublicense or distribute the Program is
|
||||
void, and will automatically terminate your rights under this License.
|
||||
However, parties who have received copies, or rights, from you under
|
||||
this License will not have their licenses terminated so long as such
|
||||
parties remain in full compliance.
|
||||
|
||||
5. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Program or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Program (or any work based on the
|
||||
Program), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Program or works based on it.
|
||||
|
||||
6. Each time you redistribute the Program (or any work based on the
|
||||
Program), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute or modify the Program subject to
|
||||
these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties to
|
||||
this License.
|
||||
|
||||
7. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Program at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Program by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Program.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system, which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
8. If the distribution and/or use of the Program is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Program under this License
|
||||
may add an explicit geographical distribution limitation excluding
|
||||
those countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
9. The Free Software Foundation may publish revised and/or new versions
|
||||
of the General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program
|
||||
specifies a version number of this License which applies to it and "any
|
||||
later version", you have the option of following the terms and conditions
|
||||
either of that version or of any later version published by the Free
|
||||
Software Foundation. If the Program does not specify a version number of
|
||||
this License, you may choose any version ever published by the Free Software
|
||||
Foundation.
|
||||
|
||||
10. If you wish to incorporate parts of the Program into other free
|
||||
programs whose distribution conditions are different, write to the author
|
||||
to ask for permission. For software which is copyrighted by the Free
|
||||
Software Foundation, write to the Free Software Foundation; we sometimes
|
||||
make exceptions for this. Our decision will be guided by the two goals
|
||||
of preserving the free status of all derivatives of our free software and
|
||||
of promoting the sharing and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
|
||||
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
|
||||
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
|
||||
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
|
||||
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
|
||||
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
|
||||
REPAIR OR CORRECTION.
|
||||
|
||||
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
|
||||
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
|
||||
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
|
||||
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
|
||||
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
|
||||
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
|
||||
POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
convey the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation; either version 2 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along
|
||||
with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program is interactive, make it output a short notice like this
|
||||
when it starts in an interactive mode:
|
||||
|
||||
Gnomovision version 69, Copyright (C) year name of author
|
||||
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, the commands you use may
|
||||
be called something other than `show w' and `show c'; they could even be
|
||||
mouse-clicks or menu items--whatever suits your program.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or your
|
||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
||||
necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
|
||||
`Gnomovision' (which makes passes at compilers) written by James Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1989
|
||||
Ty Coon, President of Vice
|
||||
|
||||
This General Public License does not permit incorporating your program into
|
||||
proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License.
|
22
README.md
22
README.md
@ -1,22 +0,0 @@
|
||||
# apt-stream
|
||||
|
||||
Create your apt repository with nodejs without having to save files or even take care of storage.
|
||||
|
||||
## Storages
|
||||
|
||||
You can host an apt fast repository with the following storages:
|
||||
|
||||
- Docker and OCI images (find `.deb` files in diff's)
|
||||
- Github Releases and Tree
|
||||
- Google Driver
|
||||
- Oracle Cloud Bucket, another driver soon
|
||||
|
||||
## Setup
|
||||
|
||||
The configuration will be very simple, the first time you run the server or even just run `apt-stream` it will look for the file in the current folder if not the one informed by the cli by the `--config/-c <path>` argument .
|
||||
|
||||
you can also create the file manually after running `apt-stream` it will make a pretty of the settings, and if there is something wrong it will ignore it or it will crash the whole program.
|
||||
|
||||
### For large repository packages
|
||||
|
||||
if you register more than 1500 packages for a single repository, I recommend disabling `gzip` and `xz` to create `Release`, as it is very slow to generate `Packages.gz` and `Packages.xz` files.
|
@ -1,39 +0,0 @@
|
||||
version: "3.9"
|
||||
name: apt_stream
|
||||
|
||||
volumes:
|
||||
# Config
|
||||
config:
|
||||
|
||||
services:
|
||||
# Mongo server
|
||||
mongo_server:
|
||||
image: mongo
|
||||
container_name: mongo_server_apt_stream
|
||||
restart: always
|
||||
|
||||
# Apt server
|
||||
apt_stream:
|
||||
build: "./"
|
||||
container_name: apt_stream
|
||||
# Restart if the container fails
|
||||
restart: on-failure
|
||||
|
||||
# Wait for mongo server to be ready
|
||||
depends_on:
|
||||
- mongo_server
|
||||
|
||||
# Mount volumes
|
||||
volumes:
|
||||
- config:/data
|
||||
|
||||
# Set extra config to "apt-stream server"
|
||||
command:
|
||||
- "--db"
|
||||
- "mongo_server"
|
||||
- "--port"
|
||||
- "3000"
|
||||
|
||||
# Expose port
|
||||
ports:
|
||||
- "3000:3000/tcp"
|
@ -1,5 +0,0 @@
|
||||
Package: example
|
||||
Architecture: amd64
|
||||
Version: 1.0.0
|
||||
Maintainer: Matheus S Queiroga
|
||||
Description: Example package
|
@ -1,3 +0,0 @@
|
||||
#!/bin/sh
|
||||
set -ex
|
||||
echo "Compress works"
|
71
package.json
71
package.json
@ -1,70 +1,13 @@
|
||||
{
|
||||
"name": "apt-stream",
|
||||
"version": "2.1.0",
|
||||
"description": "Create repository without save file in disk",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"license": "GPL-2.0",
|
||||
"author": "Matheus Sampaio Queiroga <srherobrine20@gmail.com>",
|
||||
"homepage": "https://github.com/Sirherobrine23/apt-stream#readme",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/Sirherobrine23/apt-stream.git"
|
||||
},
|
||||
"keywords": [
|
||||
"apt",
|
||||
"pack",
|
||||
"upload",
|
||||
"cloud",
|
||||
"stream",
|
||||
"server",
|
||||
"debian",
|
||||
"ubuntu",
|
||||
"cli",
|
||||
"interactive"
|
||||
"name": "mono_dpkg",
|
||||
"private": true,
|
||||
"workspaces": [
|
||||
"src/dpkg",
|
||||
"src/apt"
|
||||
],
|
||||
"bugs": {
|
||||
"url": "https://github.com/Sirherobrine23/apt-stream/issues",
|
||||
"email": "srherobrine20+apt-stream@gmail.com"
|
||||
},
|
||||
"sponsor": {
|
||||
"url": "https://github.com/sponsors/Sirherobrine23"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"apt-stream": "./src/index.js"
|
||||
},
|
||||
"scripts": {
|
||||
"prepack": "tsc --build --clean && tsc --build",
|
||||
"postpack": "tsc --build --clean"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/express": "^4.17.17",
|
||||
"@types/inquirer": "^9.0.3",
|
||||
"@types/node": "^18.16.3",
|
||||
"@types/yargs": "^17.0.24",
|
||||
"@types/node": "^20.10.0",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "^5.0.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"@sirherobrine23/cloud": "^3.6.8",
|
||||
"@sirherobrine23/decompress": "^3.6.8",
|
||||
"@sirherobrine23/docker-registry": "^3.6.8",
|
||||
"@sirherobrine23/dpkg": "^3.6.8",
|
||||
"@sirherobrine23/extends": "^3.6.8",
|
||||
"@sirherobrine23/http": "^3.6.8",
|
||||
"express": "^4.18.2",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"inquirer": "^9.2.1",
|
||||
"inquirer-file-tree-selection-prompt": "^2.0.5",
|
||||
"mongodb": "^5.4.0",
|
||||
"openpgp": "^5.8.0",
|
||||
"yaml": "^2.2.2",
|
||||
"yargs": "^17.7.2"
|
||||
"typescript": "^5.3.2"
|
||||
}
|
||||
}
|
11
railway.json
11
railway.json
@ -1,11 +0,0 @@
|
||||
{
|
||||
"$schema": "https://railway.app/railway.schema.json",
|
||||
"build": {
|
||||
"builder": "DOCKERFILE",
|
||||
"dockerfilePath": "Dockerfile"
|
||||
},
|
||||
"deploy": {
|
||||
"startCommand": "bash -c \"apt-stream server -c env:config -z -t 0 --port ${PORT} --db ${MONGO_URL}\"",
|
||||
"restartPolicyType": "ALWAYS"
|
||||
}
|
||||
}
|
22
src/apt/README.md
Normal file
22
src/apt/README.md
Normal file
@ -0,0 +1,22 @@
|
||||
# apt-stream
|
||||
|
||||
Create your apt repository with nodejs without having to save files or even take care of storage.
|
||||
|
||||
## Storages
|
||||
|
||||
You can host an apt fast repository with the following storages:
|
||||
|
||||
- Docker and OCI images (find `.deb` files in diff's)
|
||||
- Github Releases and Tree
|
||||
- Google Driver
|
||||
- Oracle Cloud Bucket, another driver soon
|
||||
|
||||
## Setup
|
||||
|
||||
The configuration will be very simple, the first time you run the server or even just run `apt-stream` it will look for the file in the current folder if not the one informed by the cli by the `--config/-c <path>` argument .
|
||||
|
||||
you can also create the file manually after running `apt-stream` it will make a pretty of the settings, and if there is something wrong it will ignore it or it will crash the whole program.
|
||||
|
||||
### For large repository packages
|
||||
|
||||
if you register more than 1500 packages for a single repository, I recommend disabling `gzip` and `xz` to create `Release`, as it is very slow to generate `Packages.gz` and `Packages.xz` files.
|
46
src/apt/package.json
Normal file
46
src/apt/package.json
Normal file
@ -0,0 +1,46 @@
|
||||
{
|
||||
"name": "apt-stream",
|
||||
"version": "2.1.0",
|
||||
"description": "Create repository without save file in disk",
|
||||
"private": false,
|
||||
"type": "module",
|
||||
"license": "GPL-2.0",
|
||||
"author": "Matheus Sampaio Queiroga <srherobrine20@gmail.com>",
|
||||
"homepage": "https://git.sirherobrine23.org/Sirherobrine23/node-apt#readme",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://git.sirherobrine23.org/Sirherobrine23/node-apt.git",
|
||||
"directory": "src/apt"
|
||||
},
|
||||
"keywords": [
|
||||
"apt",
|
||||
"pack",
|
||||
"upload",
|
||||
"cloud",
|
||||
"stream",
|
||||
"server",
|
||||
"debian",
|
||||
"ubuntu",
|
||||
"cli",
|
||||
"interactive"
|
||||
],
|
||||
"bugs": {
|
||||
"url": "https://git.sirherobrine23.org/Sirherobrine23/node-apt/issues"
|
||||
},
|
||||
"sponsor": {
|
||||
"url": "https://github.com/sponsors/Sirherobrine23"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"apt-stream": "./src/index.js"
|
||||
},
|
||||
"scripts": {
|
||||
"prepack": "tsc --build --clean && tsc --build",
|
||||
"postpack": "tsc --build --clean"
|
||||
}
|
||||
}
|
0
src/apt/src/index.ts
Normal file
0
src/apt/src/index.ts
Normal file
3
src/apt/tsconfig.json
Normal file
3
src/apt/tsconfig.json
Normal file
@ -0,0 +1,3 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json"
|
||||
}
|
628
src/config.ts
628
src/config.ts
@ -1,628 +0,0 @@
|
||||
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";
|
||||
import { extendsFS } from "@sirherobrine23/extends";
|
||||
import { Github } from "@sirherobrine23/http";
|
||||
import { apt } from "@sirherobrine23/dpkg";
|
||||
import oldFs, { promises as fs } from "node:fs";
|
||||
import dockerRegistry from "@sirherobrine23/docker-registry";
|
||||
import openpgp from "openpgp";
|
||||
import crypto from "node:crypto";
|
||||
import stream from "node:stream";
|
||||
import path from "node:path";
|
||||
import yaml from "yaml";
|
||||
|
||||
export type repositorySource = {
|
||||
/**
|
||||
* Dist component
|
||||
* @default main
|
||||
*/
|
||||
componentName?: string;
|
||||
|
||||
/**
|
||||
* The source is available for file upload.
|
||||
*/
|
||||
enableUpload?: boolean;
|
||||
} & ({
|
||||
type: "http",
|
||||
enableUpload?: false;
|
||||
url: string,
|
||||
auth?: {
|
||||
header?: {[key: string]: string},
|
||||
query?: {[key: string]: string}
|
||||
}
|
||||
}|{
|
||||
type: "mirror",
|
||||
enableUpload?: false;
|
||||
config: apt.sourceList;
|
||||
}|{
|
||||
type: "github",
|
||||
/**
|
||||
* Repository owner
|
||||
* @example `Sirherobrine23`
|
||||
*/
|
||||
owner: string,
|
||||
/**
|
||||
* Repository name
|
||||
* @example `apt-stream`
|
||||
*/
|
||||
repository: string,
|
||||
/**
|
||||
* Auth token, not required if public repository
|
||||
*/
|
||||
token?: string,
|
||||
} & ({
|
||||
subType: "release",
|
||||
tag?: string[],
|
||||
}|{
|
||||
subType: "branch",
|
||||
enableUpload?: false;
|
||||
branch: string,
|
||||
})|{
|
||||
type: "googleDriver",
|
||||
|
||||
/**
|
||||
* oAuth Client Secret
|
||||
*/
|
||||
clientSecret: string,
|
||||
|
||||
/**
|
||||
* oAuth Client ID
|
||||
*/
|
||||
clientId: string,
|
||||
|
||||
/**
|
||||
* Client oAuth
|
||||
*/
|
||||
clientToken: googleDriver.googleCredential,
|
||||
|
||||
/**
|
||||
* Files or Folders ID's
|
||||
*/
|
||||
gIDs?: string[],
|
||||
}|{
|
||||
type: "oracleBucket",
|
||||
|
||||
/**
|
||||
* Oracle bucket authentication
|
||||
*/
|
||||
authConfig: oracleBucket.oracleOptions,
|
||||
|
||||
/**
|
||||
* Files or Folders path
|
||||
*/
|
||||
path?: string[],
|
||||
}|{
|
||||
type: "docker",
|
||||
auth?: dockerRegistry.userAuth,
|
||||
image: string,
|
||||
tags?: string[]
|
||||
});
|
||||
|
||||
export interface repositorySources {
|
||||
Description?: string;
|
||||
Codename?: string;
|
||||
Suite?: string;
|
||||
Origin?: string;
|
||||
Label?: string;
|
||||
sources: {
|
||||
[key: string]: repositorySource;
|
||||
};
|
||||
}
|
||||
|
||||
export class Repository extends Map<string, repositorySource> {
|
||||
#Description?: string;
|
||||
setDescription(value: string) {this.#Description = value; return this;}
|
||||
getDescription() {return this.#Description}
|
||||
|
||||
#Codename?: string;
|
||||
setCodename(value: string) {this.#Codename = value; return this;}
|
||||
getCodename() {return this.#Codename}
|
||||
|
||||
#Suite?: string;
|
||||
setSuite(value: string) {this.#Suite = value; return this;}
|
||||
getSuite() {return this.#Suite}
|
||||
|
||||
#Origin?: string;
|
||||
setOrigin(value: string) {this.#Origin = value; return this;}
|
||||
getOrigin() {return this.#Origin}
|
||||
|
||||
#Label?: string;
|
||||
setLabel(value: string) {this.#Label = value; return this;}
|
||||
getLabel() {return this.#Label}
|
||||
|
||||
constructor(src?: repositorySources) {
|
||||
super();
|
||||
if (src) {
|
||||
if (Array.isArray(src["source"])) {
|
||||
console.warn("Migrating old repository to new Version");
|
||||
const aptConfig = src["aptConfig"] || {};
|
||||
this.#Description = aptConfig.Description;
|
||||
this.#Codename = aptConfig.Codename;
|
||||
this.#Origin = aptConfig.Origin;
|
||||
this.#Suite = aptConfig.Suite;
|
||||
this.#Label = aptConfig.Label;
|
||||
const old: any[] = src["source"];
|
||||
old.forEach(repo => {try {repo.type = repo.type.replace(/_([A-Z])/, (_sub, key: string) => key.toUpperCase()) as any; this.set(repo.id, repo as any)} catch {}});
|
||||
return;
|
||||
}
|
||||
this.#Description = src.Description;
|
||||
this.#Codename = src.Codename;
|
||||
this.#Origin = src.Origin;
|
||||
this.#Suite = src.Suite;
|
||||
this.#Label = src.Label;
|
||||
src.sources ||= {};
|
||||
for (const key in src.sources) {
|
||||
try {this.set(key, src.sources[key]);} catch {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add new repository source
|
||||
*
|
||||
* @param key - Repository ID
|
||||
* @param repo - Source config
|
||||
* @returns
|
||||
*/
|
||||
set(key: string, repo: repositorySource) {
|
||||
if (this.has(key)) throw new Error("ID are exists");
|
||||
if (repo["id"]) delete repo["id"];
|
||||
if (repo.type === "http") {
|
||||
if (!repo.url) throw new Error("Required URL to add this source");
|
||||
else {
|
||||
if (!(Object.keys(repo.auth?.header||{}).length) && repo.auth?.header) delete repo.auth.header;
|
||||
if (!(Object.keys(repo.auth?.query||{}).length) && repo.auth?.query) delete repo.auth.query;
|
||||
}
|
||||
if (!(Object.keys(repo.auth||{}).length) && repo.auth) delete repo.auth;
|
||||
repo.enableUpload = false;
|
||||
} else if (repo.type === "mirror") {
|
||||
if (!repo.config) throw new Error("Require Mirror sources");
|
||||
else if (!((repo.config = repo.config.filter(at => at.type === "packages" && at.distname?.trim?.() && at.src?.trim?.())).length)) throw new Error("To mirror the repository you only need a source");
|
||||
repo.enableUpload = false;
|
||||
} else if (repo.type === "github") {
|
||||
if (!(repo.owner && repo.repository)) throw new Error("github Sources require owner and repository");
|
||||
if (!repo.token) delete repo.token;
|
||||
if (repo.subType === "release") {
|
||||
if (!(repo.tag?.length)) delete repo.tag;
|
||||
} else if (repo.subType === "branch") {
|
||||
if (!(repo.branch)) delete repo.branch;
|
||||
repo.enableUpload = false;
|
||||
} else throw new Error("invalid github source");
|
||||
} else if (repo.type === "googleDriver") {
|
||||
if (!(repo.clientId && repo.clientSecret && (typeof repo.clientToken?.access_token === "string" && repo.clientToken.access_token.trim().length > 0))) throw new Error("Invalid settings to Google oAuth");
|
||||
if (!(repo.gIDs?.length)) delete repo.gIDs;
|
||||
} else if (repo.type === "oracleBucket") {
|
||||
if (!repo.authConfig) throw new Error("Required auth config to Oracle bucket");
|
||||
if (repo.authConfig.auth) {
|
||||
if (Array.isArray(repo.authConfig.auth)) {
|
||||
if (!(repo.authConfig.auth.length)) throw new Error("Require auth to Oracle Cloud");
|
||||
const backup = repo.authConfig.auth.slice(0, 2);
|
||||
if (!(oldFs.existsSync(path.resolve(process.cwd(), backup.at(0))))) throw new Error("Invalid Oracle auth path, Path not exists");
|
||||
backup[0] = path.resolve(process.cwd(), backup.at(0));
|
||||
if (typeof backup.at(1) === "string") {
|
||||
if (!(backup[1] = backup[1].trim())) delete backup[1];
|
||||
} else delete backup[1];
|
||||
repo.authConfig.auth = backup.filter(Boolean);
|
||||
} else {
|
||||
const { tenancy, user, fingerprint, privateKey, passphase } = repo.authConfig.auth;
|
||||
if (!(tenancy && user && fingerprint && privateKey)) throw new Error("Invalid auth to Oracle Cloud");
|
||||
if (!passphase) delete repo.authConfig.auth.passphase;
|
||||
}
|
||||
}
|
||||
if (!(repo.path?.length)) delete repo.path;
|
||||
} else if (repo.type === "docker") {
|
||||
if (!repo.image) throw new Error("Require docker image");
|
||||
if (repo.auth) if (!(repo.auth.username && repo.auth.password)) throw new Error("Required valid auth to Docker image");
|
||||
if (!(repo.tags?.length)) delete repo.tags;
|
||||
} else throw new Error("Invalid source type");
|
||||
repo.componentName ||= "main";
|
||||
repo.enableUpload ??= false;
|
||||
super.set(key, repo);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get repository source
|
||||
*
|
||||
* @param repoID - Repository ID
|
||||
* @returns repository source
|
||||
*/
|
||||
get(repoID: string) {
|
||||
if (!(this.has(repoID))) throw new Error("Repository not exists");
|
||||
return super.get(repoID);
|
||||
}
|
||||
|
||||
/** Get all repository sources with repository ID */
|
||||
getAllRepositorys(): ({repositoryID: string} & repositorySource)[] {
|
||||
return Array.from(this.keys()).map(key => ({repositoryID: key, ...(this.get(key))}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Upload debian file to repository source if avaible
|
||||
*
|
||||
* @param repositoryID - Repository ID
|
||||
* @returns
|
||||
*/
|
||||
async uploadFile(repositoryID: string) {
|
||||
const repo = this.get(repositoryID);
|
||||
if (!repo.enableUpload) throw new Error("Repository not allow or not support to upload files!");
|
||||
if (repo.type === "github") {
|
||||
if (!repo.token) throw new Error("Cannot create upload file to Github Release, required Token to upload files!");
|
||||
const { owner, repository, token } = repo;
|
||||
const gh = await Github.repositoryManeger(owner, repository, {token});
|
||||
return {
|
||||
async githubUpload(filename: string, fileSize: number, tagName?: string): Promise<stream.Writable> {
|
||||
if (!tagName) tagName = (await gh.release.getRelease("__latest__").catch(async () => (await gh.release.getRelease()).at(0)))?.tag_name||"v1";
|
||||
return (await gh.release.manegerRelease(tagName)).uploadAsset(filename, fileSize);
|
||||
}
|
||||
};
|
||||
} else if (repo.type === "googleDriver") {
|
||||
const { clientId: clientID, clientSecret, clientToken } = repo;
|
||||
const gdrive = await googleDriver.GoogleDriver({
|
||||
authConfig: {
|
||||
clientID,
|
||||
clientSecret,
|
||||
token: clientToken,
|
||||
redirectURL: "http://localhost",
|
||||
authUrlCallback(){throw new Error("Set up fist")},
|
||||
tokenCallback() {},
|
||||
}
|
||||
});
|
||||
return {
|
||||
gdriveUpload: async (filename: string, folderId?: string) => gdrive.uploadFile(filename, folderId),
|
||||
};
|
||||
} else if (repo.type === "oracleBucket") {
|
||||
const oci = await oracleBucket.oracleBucket(repo.authConfig);
|
||||
return {ociUpload: oci.uploadFile};
|
||||
} else if (repo.type === "docker") {
|
||||
return {
|
||||
dockerUpload: async (platform: dockerRegistry.dockerPlatform) => {
|
||||
const dockerRepo = new dockerRegistry.v2(repo.image, repo.auth);
|
||||
const img = await dockerRepo.createImage(platform);
|
||||
const blob = img.createBlob("gzip");
|
||||
return {
|
||||
...blob,
|
||||
annotations: img.annotations,
|
||||
async finalize(tagName?: string) {
|
||||
await blob.finalize();
|
||||
const dockerRepo = await img.finalize(tagName);
|
||||
repo.tags ||= [];
|
||||
repo.tags.push(dockerRepo.digest);
|
||||
return dockerRepo;
|
||||
}
|
||||
};
|
||||
},
|
||||
dockerUploadV2() {
|
||||
return new dockerRegistry.v2(repo.image, repo.auth);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error("Not implemented");
|
||||
}
|
||||
|
||||
toJSON(): repositorySources {
|
||||
return {
|
||||
Description: this.#Description,
|
||||
Codename: this.#Codename,
|
||||
Origin: this.#Origin,
|
||||
Label: this.#Label,
|
||||
sources: Array.from(this.keys()).reduce<{[key: string]: repositorySource}>((acc, key) => {acc[key] = this.get(key); return acc;}, {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
interface serverConfig {
|
||||
portListen: number;
|
||||
clusterForks: number;
|
||||
dataStorage?: string;
|
||||
release?: {
|
||||
gzip?: boolean;
|
||||
xz?: boolean;
|
||||
};
|
||||
database?: {
|
||||
url: string;
|
||||
databaseName?: string;
|
||||
};
|
||||
gpgSign?: {
|
||||
gpgPassphrase?: string;
|
||||
privateKey: {
|
||||
keyContent: string;
|
||||
filePath?: string;
|
||||
};
|
||||
publicKey: {
|
||||
keyContent: string;
|
||||
filePath?: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export interface configJSON extends serverConfig {
|
||||
repository: {[repoName: string]: repositorySources};
|
||||
}
|
||||
|
||||
export class aptStreamConfig {
|
||||
#internalServerConfig: serverConfig = { portListen: 0, clusterForks: 0 };
|
||||
#internalRepository: {[repositoryName: string]: Repository} = {};
|
||||
toJSON(): configJSON {
|
||||
const config: configJSON = Object(this.#internalServerConfig);
|
||||
if (config.dataStorage) config.dataStorage = path.relative(process.cwd(), config.dataStorage);
|
||||
config.repository = {};
|
||||
Object.keys(this.#internalRepository).forEach(repoName => config.repository[repoName] = this.#internalRepository[repoName].toJSON());
|
||||
return config;
|
||||
}
|
||||
|
||||
toString(encode?: BufferEncoding, type?: "json"|"yaml") {
|
||||
encode ||= "utf8";
|
||||
type ||= "json";
|
||||
return ((["hex", "base64", "base64url"]).includes(encode) ? (encode+":") : "")+(Buffer.from((type === "yaml" ? yaml : JSON).stringify(this.toJSON(), null, (["hex", "base64", "base64url"]).includes(encode) ? undefined : 2), "utf8").toString(encode || "utf8"));
|
||||
}
|
||||
|
||||
#configPath?: string;
|
||||
async saveConfig(configPath?: string, type?: "json"|"yaml") {
|
||||
if (!(configPath||this.#configPath)) throw new Error("Current config only memory");
|
||||
if (this.#configPath) type ||= path.extname(this.#configPath) === ".json" ? "json" : "yaml";
|
||||
else if (configPath) type ||= path.extname(configPath) === ".json" ? "json" : "yaml";
|
||||
await fs.writeFile((configPath||this.#configPath), this.toString("utf8", type));
|
||||
}
|
||||
|
||||
constructor(config?: string|configJSON|aptStreamConfig) {
|
||||
if (config) {
|
||||
let nodeConfig: configJSON;
|
||||
if (config instanceof aptStreamConfig) {
|
||||
this.#configPath = config.#configPath;
|
||||
config = config.toJSON();
|
||||
}
|
||||
if (typeof config === "string") {
|
||||
let indexofEncoding: number;
|
||||
if (config.startsWith("env:")) config = process.env[config.slice(4)];
|
||||
if (path.isAbsolute(path.resolve(process.cwd(), config))) {
|
||||
if (oldFs.existsSync(path.resolve(process.cwd(), config))) config = oldFs.readFileSync((this.#configPath = path.resolve(process.cwd(), config)), "utf8")
|
||||
else {
|
||||
this.#configPath = path.resolve(process.cwd(), config);
|
||||
config = undefined;
|
||||
}
|
||||
} else if ((["hex:", "base64:", "base64url:"]).find(rel => config.toString().startsWith(rel))) config = Buffer.from(config.slice(indexofEncoding+1).trim(), config.slice(0, indexofEncoding) as BufferEncoding).toString("utf8");
|
||||
else config = undefined;
|
||||
if (!!config) {
|
||||
try {
|
||||
nodeConfig = JSON.parse(config as string);
|
||||
} catch {
|
||||
try {
|
||||
nodeConfig = yaml.parse(config as string);
|
||||
} catch {
|
||||
throw new Error("Invalid config, not is YAML or JSON");
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (typeof config === "object") nodeConfig = config;
|
||||
|
||||
// Add sources
|
||||
nodeConfig ||= {clusterForks: 0, portListen: 0, repository: {}};
|
||||
nodeConfig.repository ||= {};
|
||||
Object.keys(nodeConfig.repository).forEach(keyName => this.#internalRepository[keyName] = new Repository(nodeConfig.repository[keyName]));
|
||||
|
||||
// Add server config
|
||||
delete nodeConfig.repository;
|
||||
this.#internalServerConfig = {clusterForks: Number(nodeConfig.clusterForks || 0), portListen: Number(nodeConfig.portListen || 0)};
|
||||
if (nodeConfig.dataStorage) this.#internalServerConfig.dataStorage = path.resolve(process.cwd(), nodeConfig.dataStorage);
|
||||
this.#internalServerConfig.release = {};
|
||||
this.#internalServerConfig.release.gzip = !!(nodeConfig.release?.gzip ?? true);
|
||||
this.#internalServerConfig.release.xz = !!(nodeConfig.release?.xz ?? true);
|
||||
if (nodeConfig.database?.url) this.#internalServerConfig.database = {
|
||||
url: nodeConfig.database.url,
|
||||
databaseName: nodeConfig.database.databaseName || "aptStream"
|
||||
};
|
||||
if (nodeConfig.gpgSign?.privateKey && nodeConfig.gpgSign?.publicKey) {
|
||||
const { gpgPassphrase, privateKey, publicKey } = nodeConfig.gpgSign;
|
||||
if (privateKey.filePath && publicKey.filePath) {
|
||||
privateKey.keyContent = oldFs.readFileSync(privateKey.filePath, "utf8");
|
||||
publicKey.keyContent = oldFs.readFileSync(publicKey.filePath, "utf8");
|
||||
}
|
||||
this.#internalServerConfig.gpgSign = {
|
||||
gpgPassphrase: String(gpgPassphrase||""),
|
||||
privateKey: {
|
||||
keyContent: privateKey.keyContent,
|
||||
filePath: privateKey.filePath
|
||||
},
|
||||
publicKey: {
|
||||
keyContent: publicKey.keyContent,
|
||||
filePath: publicKey.filePath
|
||||
}
|
||||
};
|
||||
}
|
||||
if (!this.#internalServerConfig.gpgSign?.gpgPassphrase && typeof this.#internalServerConfig.gpgSign?.gpgPassphrase === "string") delete this.#internalServerConfig.gpgSign.gpgPassphrase;
|
||||
}
|
||||
}
|
||||
|
||||
setCompressRelease(target: keyof configJSON["release"], value: boolean) {
|
||||
this.#internalServerConfig.release[target] = !!value;
|
||||
return this;
|
||||
}
|
||||
|
||||
getCompressRelease(target: keyof configJSON["release"]) {
|
||||
return !!(this.#internalServerConfig.release?.[target]);
|
||||
}
|
||||
|
||||
databaseAvaible() {return !!this.#internalServerConfig.database;}
|
||||
getDatabase() {
|
||||
if (!this.databaseAvaible()) throw new Error("No Database set up");
|
||||
return this.#internalServerConfig.database;
|
||||
}
|
||||
|
||||
setDatabse(url: string, databaseName?: string) {
|
||||
this.#internalServerConfig.database = {
|
||||
url,
|
||||
databaseName
|
||||
};
|
||||
return this;
|
||||
}
|
||||
|
||||
getClusterForks() {return Number(this.#internalServerConfig.clusterForks || 0);}
|
||||
setClusterForks(value: number) {
|
||||
if (value > 0 && value < 256) this.#internalServerConfig.clusterForks = value;
|
||||
else this.#internalServerConfig.clusterForks = 0;
|
||||
return this;
|
||||
}
|
||||
|
||||
setDataStorage(folderPath: string) {
|
||||
if (path.isAbsolute(folderPath)) this.#internalServerConfig.dataStorage = folderPath; else throw new Error("Require absolute path");
|
||||
return this;
|
||||
}
|
||||
async getDataStorage() {
|
||||
if (!this.#internalServerConfig.dataStorage) return undefined;
|
||||
if (!(await extendsFS.exists(this.#internalServerConfig.dataStorage))) await fs.mkdir(this.#internalServerConfig.dataStorage, {recursive: true});
|
||||
return this.#internalServerConfig.dataStorage;
|
||||
}
|
||||
|
||||
getPortListen() {return Number(this.#internalServerConfig.portListen || 0);}
|
||||
setPortListen(port: number) {
|
||||
if (port >= 0 && port <= ((2**16) - 1)) this.#internalServerConfig.portListen = port;
|
||||
else throw new Error(`Invalid port range (0 - ${(2**16) - 1})`);
|
||||
return this;
|
||||
}
|
||||
|
||||
setPGPKey(gpgSign: configJSON["gpgSign"]) {
|
||||
const { gpgPassphrase, privateKey, publicKey } = gpgSign;
|
||||
if (privateKey.filePath && publicKey.filePath) {
|
||||
privateKey.keyContent = oldFs.readFileSync(privateKey.filePath, "utf8");
|
||||
publicKey.keyContent = oldFs.readFileSync(publicKey.filePath, "utf8");
|
||||
}
|
||||
this.#internalServerConfig.gpgSign = {
|
||||
gpgPassphrase: String(gpgPassphrase||""),
|
||||
privateKey: {
|
||||
keyContent: privateKey.keyContent,
|
||||
filePath: privateKey.filePath
|
||||
},
|
||||
publicKey: {
|
||||
keyContent: publicKey.keyContent,
|
||||
filePath: publicKey.filePath
|
||||
}
|
||||
};
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate Private and Public PGP/GPG Keys to signing repository (InRelease and Release.gpg)
|
||||
*
|
||||
* @param options - Gpg Options
|
||||
* @returns
|
||||
*/
|
||||
async generateGpgKeys(options?: {passphrase?: string, email?: string, name?: string}) {
|
||||
const { passphrase, email, name } = options || {};
|
||||
const { privateKey, publicKey } = await openpgp.generateKey({
|
||||
rsaBits: 4094,
|
||||
type: "rsa",
|
||||
format: "armored",
|
||||
passphrase,
|
||||
userIDs: [
|
||||
{
|
||||
comment: "Generated in Apt-Stream",
|
||||
email, name
|
||||
}
|
||||
]
|
||||
});
|
||||
this.#internalServerConfig.gpgSign = {
|
||||
gpgPassphrase: passphrase,
|
||||
privateKey: {keyContent: privateKey},
|
||||
publicKey: {keyContent: publicKey}
|
||||
};
|
||||
if (this.#internalServerConfig.dataStorage) {
|
||||
this.#internalServerConfig.gpgSign.privateKey.filePath = path.join(this.#internalServerConfig.dataStorage, "privateKey.gpg");
|
||||
this.#internalServerConfig.gpgSign.publicKey.filePath = path.join(this.#internalServerConfig.dataStorage, "publicKey.gpg");
|
||||
await fs.writeFile(this.#internalServerConfig.gpgSign.privateKey.filePath, this.#internalServerConfig.gpgSign.privateKey.keyContent);
|
||||
await fs.writeFile(this.#internalServerConfig.gpgSign.publicKey.filePath, this.#internalServerConfig.gpgSign.publicKey.keyContent);
|
||||
}
|
||||
|
||||
return this.#internalServerConfig.gpgSign;
|
||||
}
|
||||
|
||||
getPGPKey() {
|
||||
if (!this.#internalServerConfig.gpgSign) throw new Error("PGP/GPG Key not set");
|
||||
return this.#internalServerConfig.gpgSign;
|
||||
}
|
||||
|
||||
async getPublicKey(type: "dearmor"|"armor"): Promise<string|Buffer> {
|
||||
const { publicKey } = this.getPGPKey();
|
||||
// same to gpg --dearmor
|
||||
if (type === "dearmor") return Buffer.from((await openpgp.unarmor(publicKey.keyContent)).data as any);
|
||||
return (await openpgp.readKey({ armoredKey: publicKey.keyContent })).armor();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new source to repository.
|
||||
*
|
||||
* @param repositoryName - Repository name
|
||||
* @returns Repository class
|
||||
*/
|
||||
createRepository(repositoryName: string) {
|
||||
if (this.#internalRepository[repositoryName]) throw new Error("Repository name are exists");
|
||||
this.#internalRepository[repositoryName] = new Repository();
|
||||
this.#internalRepository[repositoryName].setCodename(repositoryName).setOrigin(repositoryName);
|
||||
return this.#internalRepository[repositoryName];
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param repositoryName - Repository name, if not exists create this.
|
||||
* @param pkgSource - Packages source
|
||||
* @returns
|
||||
*/
|
||||
addToRepository(repositoryName: string, pkgSource: repositorySource) {
|
||||
this.#internalRepository[repositoryName] ||= new Repository();
|
||||
this.#internalRepository[repositoryName].set(this.createRepositoryID(), pkgSource);
|
||||
return this;
|
||||
}
|
||||
|
||||
createRepositoryID() {
|
||||
let repoID: string;
|
||||
while (!repoID) {
|
||||
repoID = ("aptS__")+(crypto.randomBytes(16).toString("hex"));
|
||||
if (this.getRepositorys().find(key => key.repositoryManeger.has(repoID))) repoID = undefined;
|
||||
}
|
||||
return repoID;
|
||||
}
|
||||
|
||||
hasSource(repositoryName: string) {
|
||||
return !!(this.#internalRepository[repositoryName]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get repository source
|
||||
* @param repositoryName - Repository name or Codename
|
||||
* @returns
|
||||
*/
|
||||
getRepository(repositoryName: string) {
|
||||
if (repositoryName.startsWith("aptS__")) {
|
||||
const bc = repositoryName;
|
||||
repositoryName = undefined;
|
||||
for (const repo of Object.keys(this.#internalRepository)) if (this.#internalRepository[repo].has(bc)) {repositoryName = repo; break;}
|
||||
} else if (!this.#internalRepository[repositoryName]) {
|
||||
const bc = repositoryName;
|
||||
repositoryName = undefined;
|
||||
for (const repo of Object.keys(this.#internalRepository)) if (this.#internalRepository[repo].getCodename() === bc) {repositoryName = repo; break;}
|
||||
}
|
||||
if (!repositoryName) throw new Error("Repository not exists");
|
||||
return this.#internalRepository[repositoryName];
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete repository
|
||||
*
|
||||
* @param repositoryName - Repository name or Codename
|
||||
* @returns return a boolean to indicate delete status
|
||||
*/
|
||||
deleteRepository(repositoryName: string) {
|
||||
if (!this.#internalRepository[repositoryName]) {
|
||||
const bc = repositoryName;
|
||||
repositoryName = undefined;
|
||||
for (const repo of Object.keys(this.#internalRepository)) if (this.#internalRepository[repo].getCodename() === bc) {repositoryName = repo; break;}
|
||||
if (!repositoryName) throw new Error("Repository not exists");
|
||||
}
|
||||
return delete this.#internalRepository[repositoryName];
|
||||
}
|
||||
|
||||
getRepositorys() {
|
||||
return Object.keys(this.#internalRepository).map(repositoryName => ({repositoryName, repositoryManeger: this.#internalRepository[repositoryName]}));
|
||||
}
|
||||
}
|
@ -1,766 +0,0 @@
|
||||
import { Repository, aptStreamConfig, repositorySource } from "./config.js";
|
||||
import connectDb, { packageManeger } from "./packages.js";
|
||||
import { googleDriver } from "@sirherobrine23/cloud";
|
||||
import { readFile, readdir } from "fs/promises";
|
||||
import { apt } from "@sirherobrine23/dpkg";
|
||||
import inquirerFileTreeSelection from "inquirer-file-tree-selection-prompt";
|
||||
import dockerRegistry from "@sirherobrine23/docker-registry";
|
||||
import coreHTTP from "@sirherobrine23/http";
|
||||
import inquirer from "inquirer";
|
||||
import path from "node:path";
|
||||
import { MongoClient } from "mongodb";
|
||||
import { createServer } from "http";
|
||||
import { githubToken } from "@sirherobrine23/http/src/github.js";
|
||||
import dns from "node:dns/promises";
|
||||
import os from "os";
|
||||
inquirer.registerPrompt("file-tree-selection", inquirerFileTreeSelection);
|
||||
|
||||
export default async function main(configOrigin: string) {
|
||||
const config = new aptStreamConfig(configOrigin);
|
||||
if (!config.databaseAvaible()) await setDatabse(config);
|
||||
const configManeger = await connectDb(config);
|
||||
while(true) {
|
||||
const action = (await inquirer.prompt<{initAction: "serverEdit"|"newRepo"|"del"|"editRepo"|"syncRepo"|"exit"}>({
|
||||
name: "initAction",
|
||||
type: "list",
|
||||
message: "Select action:",
|
||||
choices: [
|
||||
{
|
||||
name: "Edit repository",
|
||||
value: "editRepo"
|
||||
},
|
||||
{
|
||||
name: "Create new repository",
|
||||
value: "newRepo"
|
||||
},
|
||||
{
|
||||
name: "Delete repository",
|
||||
value: "del"
|
||||
},
|
||||
{
|
||||
name: "Edit server configs",
|
||||
value: "serverEdit"
|
||||
},
|
||||
{
|
||||
name: "Exit",
|
||||
value: "exit"
|
||||
}
|
||||
]
|
||||
})).initAction;
|
||||
if (action === "exit") break;
|
||||
else if (action === "newRepo") {
|
||||
const repoName = (await inquirer.prompt({
|
||||
name: "repoName",
|
||||
message: "Repository name:",
|
||||
type: "input",
|
||||
validate: (name) => configManeger.hasSource(name.trim()) ? "Type other repository name, this are exist's" : true,
|
||||
})).repoName.trim();
|
||||
if (repoName) await editRepository(configManeger.createRepository(repoName), configManeger);
|
||||
else console.log("The repository was not created, cancelling!");
|
||||
} else if (action === "editRepo") {
|
||||
const repo = configManeger.getRepositorys();
|
||||
const repoSelected = (await inquirer.prompt({
|
||||
name: "repo",
|
||||
message: "Selecte repository:",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "Cancel",
|
||||
value: "exit"
|
||||
},
|
||||
...(repo.map(d => d.repositoryName))
|
||||
],
|
||||
})).repo;
|
||||
if (repoSelected !== "exit") await editRepository(configManeger.getRepository(repoSelected), configManeger);
|
||||
} else if (action === "del") {
|
||||
const repo = configManeger.getRepositorys();
|
||||
const repoSelected = (await inquirer.prompt({
|
||||
name: "repo",
|
||||
message: "Selecte repository:",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "Cancel",
|
||||
value: "exit"
|
||||
},
|
||||
...(repo.map(d => d.repositoryName))
|
||||
],
|
||||
})).repo;
|
||||
if (repoSelected !== "exit") {
|
||||
if (configManeger.deleteRepository(repoSelected)) console.log("Repository deleted");
|
||||
else console.error("Fail to delete repository!");
|
||||
}
|
||||
} else if (action === "serverEdit") await serverConfig(configManeger);
|
||||
await configManeger.saveConfig().catch(() => {});
|
||||
}
|
||||
|
||||
return configManeger.close().then(async () => configManeger.saveConfig());
|
||||
}
|
||||
|
||||
async function serverConfig(config: packageManeger) {
|
||||
while (true) {
|
||||
await config.saveConfig().catch(() => {});
|
||||
const { action } = await inquirer.prompt({
|
||||
name: "action",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "Serve port",
|
||||
value: "serverPort"
|
||||
},
|
||||
{
|
||||
name: "Serve threads forks",
|
||||
value: "serverThreads"
|
||||
},
|
||||
{
|
||||
name: "Change mongodb URL",
|
||||
value: "updateDB"
|
||||
},
|
||||
{
|
||||
name: "Switch gzip release compressions",
|
||||
value: "relGzip"
|
||||
},
|
||||
{
|
||||
name: "Switch xz release compressions",
|
||||
value: "relXz"
|
||||
},
|
||||
{
|
||||
name: "Return",
|
||||
value: "exit"
|
||||
},
|
||||
]
|
||||
});
|
||||
if (action === "exit") break;
|
||||
else if (action === "relGzip") console.log("Set gzip to %O", config.setCompressRelease("gzip", !config.getCompressRelease("gzip")).getCompressRelease("gzip"));
|
||||
else if (action === "relXz") console.log("Set xz to %O", config.setCompressRelease("xz", !config.getCompressRelease("xz")).getCompressRelease("xz"));
|
||||
else if (action === "serverPort") {
|
||||
await inquirer.prompt({
|
||||
name: "port",
|
||||
type: "number",
|
||||
default: config.getPortListen(),
|
||||
message: "Server port:",
|
||||
validate(input: number) {
|
||||
if (input < 0 || input > 65535) return "Port must be between 0 and 65535";
|
||||
config.setPortListen(input);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
} else if (action === "serverThreads") {
|
||||
await inquirer.prompt({
|
||||
name: "threads",
|
||||
type: "number",
|
||||
default: config.getClusterForks(),
|
||||
message: "Server threads forks:",
|
||||
validate(input: number) {
|
||||
if (input < 0) return "Threads must be greater or equal 0";
|
||||
if (input > os.availableParallelism()) console.warn("\nThe number of threads was greater than the system can handle, be careful!");
|
||||
config.setClusterForks(input);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
} else if (action === "updateDB") await setDatabse(config);
|
||||
}
|
||||
}
|
||||
|
||||
async function setDatabse(repo: aptStreamConfig) {
|
||||
const promps = await inquirer.prompt({
|
||||
name: "url",
|
||||
type: "input",
|
||||
message: "Mongodb URL:",
|
||||
async validate(input) {
|
||||
try {
|
||||
await (await (new MongoClient(input)).connect()).close(true);
|
||||
return true;
|
||||
} catch (err) {
|
||||
return err?.message || err;
|
||||
}
|
||||
},
|
||||
});
|
||||
repo.setDatabse(promps.url);
|
||||
}
|
||||
|
||||
async function editRepository(repo: Repository, configManeger: packageManeger) {
|
||||
let exitShowSync = false;
|
||||
await configManeger.saveConfig().catch(() => {});
|
||||
while (true) {
|
||||
const action = (await inquirer.prompt({
|
||||
name: "action",
|
||||
message: "Repository actions:",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
name: "New repository sources",
|
||||
value: "add"
|
||||
},
|
||||
{
|
||||
name: "Delete sources",
|
||||
value: "del"
|
||||
},
|
||||
{
|
||||
name: "Delete all sources",
|
||||
value: "delAll"
|
||||
},
|
||||
{
|
||||
name: "Set repository codename",
|
||||
value: "setCodename"
|
||||
},
|
||||
{
|
||||
name: "Set repository description",
|
||||
value: "setDescription"
|
||||
},
|
||||
{
|
||||
name: "Set repository label",
|
||||
value: "setLabel"
|
||||
},
|
||||
{
|
||||
name: "Set repository origin",
|
||||
value: "setOrigin"
|
||||
},
|
||||
{
|
||||
name: "Set repository suite",
|
||||
value: "setSuite"
|
||||
},
|
||||
{
|
||||
name: "Return",
|
||||
value: "exit"
|
||||
},
|
||||
]
|
||||
})).action;
|
||||
|
||||
if (action === "exit") break;
|
||||
else if (action === "setCodename") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "What is new Codename?"
|
||||
})).value;
|
||||
repo.setCodename(input);
|
||||
} else if (action === "setDescription") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "Set description in one single line:"
|
||||
})).value;
|
||||
repo.setDescription(input);
|
||||
} else if (action === "setLabel") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "Set label:"
|
||||
})).value;
|
||||
repo.setLabel(input);
|
||||
} else if (action === "setOrigin") {
|
||||
const input = (await inquirer.prompt([
|
||||
{
|
||||
when: () => !!repo.getOrigin(),
|
||||
name: "confirm",
|
||||
message: "Are you sure you want to change Origin?",
|
||||
type: "confirm",
|
||||
default: false
|
||||
},
|
||||
{
|
||||
when: (ask) => !repo.getOrigin() || ask["confirm"],
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "What is Origin name?"
|
||||
}
|
||||
])).value;
|
||||
if (!input) console.log("Canceled set origin"); else repo.setOrigin(input);
|
||||
} else if (action === "setSuite") {
|
||||
const input = (await inquirer.prompt({
|
||||
name: "value",
|
||||
type: "input",
|
||||
message: "What is Suite name?"
|
||||
})).value;
|
||||
repo.setSuite(input);
|
||||
} else if (action === "delAll") {
|
||||
exitShowSync = true;
|
||||
repo.clear();
|
||||
} else if (action === "del") {
|
||||
const srcs = repo.getAllRepositorys();
|
||||
if (!srcs.length) {
|
||||
console.info("Not sources!");
|
||||
continue;
|
||||
}
|
||||
const sel: string[] = (await inquirer.prompt({
|
||||
name: "sel",
|
||||
type: "checkbox",
|
||||
message: "Select IDs:",
|
||||
choices: repo.getAllRepositorys().map(d => ({name: `${d.repositoryID} (${d.type})`, value: d.repositoryID})),
|
||||
})).sel;
|
||||
exitShowSync = true;
|
||||
sel.forEach(id => repo.delete(id));
|
||||
} else if (action === "add") {
|
||||
const root = async () => createSource().catch(err => {
|
||||
console.error(err);
|
||||
console.log("Try again");
|
||||
return createSource();
|
||||
});
|
||||
repo.set(configManeger.createRepositoryID(), await root());
|
||||
}
|
||||
}
|
||||
if (exitShowSync) console.info("Sync packages!");
|
||||
return repo;
|
||||
}
|
||||
|
||||
async function createSource(): Promise<repositorySource> {
|
||||
let { srcType, componentName } = (await inquirer.prompt<{srcType: repositorySource["type"], componentName?: string}>([
|
||||
{
|
||||
name: "srcType",
|
||||
type: "list",
|
||||
choices: [
|
||||
{
|
||||
value: "http",
|
||||
name: "HTTP Directly"
|
||||
},
|
||||
{
|
||||
value: "mirror",
|
||||
name: "APT Mirror"
|
||||
},
|
||||
{
|
||||
value: "github",
|
||||
name: "Github Release/Branch"
|
||||
},
|
||||
{
|
||||
value: "googleDriver",
|
||||
name: "Google Drive"
|
||||
},
|
||||
{
|
||||
value: "oracleBucket",
|
||||
name: "Oracle Cloud Infracture Bucket"
|
||||
},
|
||||
{
|
||||
value: "docker",
|
||||
name: "OCI (Open Container Iniciative)/Docker Image"
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
type: "confirm",
|
||||
name: "addComp",
|
||||
message: "Add component name?",
|
||||
default: false
|
||||
},
|
||||
{
|
||||
name: "componentName",
|
||||
when: (answers) => answers["addComp"],
|
||||
type: "input",
|
||||
default: "main",
|
||||
validate: (inputComp) => (/[\s]+/).test(inputComp) ? "Remove Spaces" : true
|
||||
}
|
||||
]));
|
||||
componentName ||= "main";
|
||||
if (srcType === "http") {
|
||||
return {
|
||||
type: "http", componentName,
|
||||
url: (await inquirer.prompt({
|
||||
name: "reqUrl",
|
||||
type: "input",
|
||||
async validate(urlInput) {
|
||||
try {
|
||||
const { hostname } = new URL(urlInput);
|
||||
await dns.resolve(hostname);
|
||||
return true
|
||||
} catch (err) { return err?.message || String(err); }}
|
||||
})).reqUrl,
|
||||
};
|
||||
} else if (srcType === "mirror") {
|
||||
const promps = (await inquirer.prompt([
|
||||
{
|
||||
type: "list",
|
||||
name: "sourceFrom",
|
||||
choices: [
|
||||
{name: "Select file", value: "fileSelect"},
|
||||
{name: "Create from scrat", value: "createIt"}
|
||||
]
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["sourceFrom"] === "fileSelect",
|
||||
name: "fileSource",
|
||||
type: "file-tree-selection",
|
||||
message: "Select file source path:"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["sourceFrom"] !== "fileSelect",
|
||||
name: "fileSource",
|
||||
type: "editor",
|
||||
message: "creating sources",
|
||||
default: "# This is comment\ndeb http://example.com example main",
|
||||
}
|
||||
]));
|
||||
|
||||
return {
|
||||
type: "mirror", componentName,
|
||||
config: (apt.parseSourceList(promps["sourceFrom"] !== "fileSelect" ? promps["fileSource"] : await readFile(promps["fileSource"], "utf8"))).filter(src => src.type === "packages")
|
||||
};
|
||||
} else if (srcType === "github") {
|
||||
const promps = await inquirer.prompt([
|
||||
{
|
||||
name: "token",
|
||||
type: "input",
|
||||
message: "Github token to private repositorys (it is not necessary if it is public):",
|
||||
default: githubToken,
|
||||
validate(input: string) {
|
||||
if (input.length > 0) if (!(input.startsWith("ghp_"))) return "Invalid token, if old token set manualy in Config file!";
|
||||
return true;
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "owner",
|
||||
type: "input",
|
||||
message: "Repository owner:",
|
||||
async validate(input, ask) {
|
||||
try {
|
||||
const apiReq = new URL(path.posix.join("/users", path.posix.resolve("/", input)), "https://api.github.com");
|
||||
await coreHTTP.jsonRequestBody(apiReq, {headers: ask.token ? {Authorization: `token ${ask.token}`}:{}});
|
||||
return true;
|
||||
} catch (err) {
|
||||
return err?.body?.message || err?.message || String(err);
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "repository",
|
||||
type: "list",
|
||||
message: "Select repository:",
|
||||
async choices(answers) {
|
||||
const apiReq = new URL(path.posix.join("/users", answers["owner"], "repos"), "https://api.github.com");
|
||||
return (await coreHTTP.jsonRequestBody<{name: string}[]>(apiReq, {headers: answers.token ? {Authorization: `token ${answers.token}`}:{}})).map(({name}) => name);
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "subType",
|
||||
type: "list",
|
||||
message: "Where to get the .deb files?",
|
||||
choices: [
|
||||
"Release",
|
||||
"Branch"
|
||||
]
|
||||
}
|
||||
]);
|
||||
|
||||
const { owner, repository, token } = promps;
|
||||
if (promps["subType"] === "Branch") {
|
||||
return {
|
||||
type: "github", subType: "branch", componentName, enableUpload: false,
|
||||
owner, repository, token,
|
||||
branch: (await inquirer.prompt({
|
||||
name: "branch",
|
||||
type: "list",
|
||||
message: "Select the branch:",
|
||||
async choices() {
|
||||
const apiReq = new URL(path.posix.join("/repos", owner, repository, "branches"), "https://api.github.com");
|
||||
return (await coreHTTP.jsonRequestBody<{name: string}[]>(apiReq)).map(({name}) => name);
|
||||
}
|
||||
})).branch,
|
||||
};
|
||||
}
|
||||
const { tag, enableUpload } = await inquirer.prompt([
|
||||
{
|
||||
when: () => !!token,
|
||||
type: "confirm",
|
||||
name: "enableUpload",
|
||||
message: "Enable support to upload files to Github Release?",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
name: "tag",
|
||||
type: "checkbox",
|
||||
message: "Select tags:",
|
||||
async choices() {
|
||||
const apiReq = new URL(path.posix.join("/repos", owner, repository, "releases"), "https://api.github.com");
|
||||
return (await coreHTTP.jsonRequestBody<{tag_name: string}[]>(apiReq)).map(({tag_name}) => tag_name);
|
||||
}
|
||||
},
|
||||
]);
|
||||
return {
|
||||
type: "github", subType: "release", componentName, enableUpload,
|
||||
owner, repository, token,
|
||||
tag,
|
||||
}
|
||||
} else if (srcType === "googleDriver") {
|
||||
let client_id: string, client_secret: string;
|
||||
try {
|
||||
const secretFile = (await readdir(process.cwd()).then(files => files.filter(file => file.endsWith(".json") && file.startsWith("client_secret")))).at(0);
|
||||
if (secretFile) {
|
||||
const cbb = JSON.parse(await readFile(secretFile, "utf8"));
|
||||
if (typeof cbb.installed === "object") {
|
||||
client_id = cbb.installed.client_id;
|
||||
client_secret = cbb.installed.client_secret;
|
||||
} else if (typeof cbb.CBe === "object") {
|
||||
client_id = cbb.CBe.client_id;
|
||||
client_secret = cbb.CBe.client_secret;
|
||||
} else if (typeof cbb.client_id === "string" && typeof cbb.client_secret === "string") {
|
||||
client_id = cbb.client_id;
|
||||
client_secret = cbb.client_secret;
|
||||
}
|
||||
}
|
||||
} catch {}
|
||||
|
||||
const clientPromp = await inquirer.prompt([
|
||||
{
|
||||
type: "input",
|
||||
name: "id",
|
||||
message: "Google oAuth Client ID:",
|
||||
default: client_id
|
||||
},
|
||||
{
|
||||
type: "input",
|
||||
name: "secret",
|
||||
message: "Google oAuth Client Secret:",
|
||||
default: client_secret,
|
||||
},
|
||||
{
|
||||
type: "confirm",
|
||||
name: "enableUpload",
|
||||
message: "Enable support to upload files to Google driver?",
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
name: "listFiles",
|
||||
type: "confirm",
|
||||
message: "After authenticating Google Drive, will you want to select the files?"
|
||||
},
|
||||
{
|
||||
when: (ask) => ask["listFiles"],
|
||||
name: "folderID",
|
||||
type: "input",
|
||||
message: "Folder ID?"
|
||||
}
|
||||
]);
|
||||
let clientToken: any;
|
||||
const server = createServer();
|
||||
const port = await new Promise<number>((resolve, reject) => {
|
||||
server.once("error", reject);
|
||||
server.listen(0, () => {
|
||||
const addr = server.address();
|
||||
server.removeListener("error", reject);
|
||||
resolve(Number((typeof addr === "string" ? addr : addr?.port) || addr));
|
||||
});
|
||||
});
|
||||
const gdrive = await googleDriver.GoogleDriver({
|
||||
authConfig: {
|
||||
clientSecret: clientPromp["secret"],
|
||||
clientID: clientPromp["id"],
|
||||
redirectURL: "http://localhost:" + port,
|
||||
authUrlCallback(authUrl, callback) {
|
||||
server.once("request", function call(req, res) {
|
||||
const { searchParams } = new URL(String(req.url), "http://localhost:"+port);
|
||||
if (!searchParams.has("code")) {
|
||||
res.statusCode = 400;
|
||||
res.end("No code");
|
||||
server.once("request", call);
|
||||
return;
|
||||
}
|
||||
res.statusCode = 200;
|
||||
res.end(searchParams.get("code"));
|
||||
callback(searchParams.get("code"))
|
||||
});
|
||||
console.error("Please open the following URL in your browser:", authUrl);
|
||||
},
|
||||
tokenCallback(token) {
|
||||
clientToken = token;
|
||||
console.log("Google Drive token:", token);
|
||||
},
|
||||
}
|
||||
});
|
||||
server.close();
|
||||
let gIDs: string[];
|
||||
if (clientPromp["listFiles"]) {
|
||||
const folderID = clientPromp["folderID"]||undefined;
|
||||
const files = (await gdrive.listFiles(folderID)).filter(file => file.name.endsWith(".deb"));
|
||||
if (files.length <= 0) console.log("No files currently in you drive");
|
||||
else gIDs = (await inquirer.prompt({
|
||||
name: "ids",
|
||||
type: "checkbox",
|
||||
choices: files.map(file => ({name: file.name, value: file.id, checked: true}))
|
||||
})).ids;
|
||||
}
|
||||
|
||||
return {
|
||||
type: "googleDriver", componentName, enableUpload: clientPromp["enableUpload"],
|
||||
clientSecret: clientPromp["secret"],
|
||||
clientId: clientPromp["id"],
|
||||
clientToken,
|
||||
gIDs
|
||||
};
|
||||
} else if (srcType === "oracleBucket") {
|
||||
const ociPromps = await inquirer.prompt([
|
||||
{
|
||||
name: "namespace",
|
||||
type: "input",
|
||||
message: "OCI Bucket namespace:"
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
type: "input",
|
||||
message: "Bucket name:"
|
||||
},
|
||||
{
|
||||
name: "region",
|
||||
type: "list",
|
||||
message: "Select Bucket region:",
|
||||
choices: [
|
||||
"af-johannesburg-1",
|
||||
"ap-chuncheon-1",
|
||||
"ap-hyderabad-1",
|
||||
"ap-melbourne-1",
|
||||
"ap-mumbai-1",
|
||||
"ap-osaka-1",
|
||||
"ap-seoul-1",
|
||||
"ap-singapore-1",
|
||||
"ap-sydney-1",
|
||||
"ap-tokyo-1",
|
||||
"ca-montreal-1",
|
||||
"ca-toronto-1",
|
||||
"eu-amsterdam-1",
|
||||
"eu-frankfurt-1",
|
||||
"eu-madrid-1",
|
||||
"eu-marseille-1",
|
||||
"eu-milan-1",
|
||||
"eu-paris-1",
|
||||
"eu-stockholm-1",
|
||||
"eu-zurich-1",
|
||||
"il-jerusalem-1",
|
||||
"me-abudhabi-1",
|
||||
"me-jeddah-1",
|
||||
"mx-queretaro-1",
|
||||
"sa-santiago-1",
|
||||
"sa-saopaulo-1",
|
||||
"sa-vinhedo-1",
|
||||
"uk-cardiff-1",
|
||||
"uk-london-1",
|
||||
"us-ashburn-1",
|
||||
"us-chicago-1",
|
||||
"us-phoenix-1",
|
||||
"us-sanjose-1"
|
||||
]
|
||||
},
|
||||
{
|
||||
name: "authType",
|
||||
type: "list",
|
||||
choices: [
|
||||
{name: "OCI Cli config", value: "preAuthentication"},
|
||||
{name: "User", value: "user"},
|
||||
]
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "tenancy",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "user",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "fingerprint",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "privateKey",
|
||||
type: "input"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["authType"] !== "preAuthentication",
|
||||
name: "passphase",
|
||||
type: "confirm",
|
||||
message: "Private key require password to decrypt?"
|
||||
},
|
||||
{
|
||||
when: (answers) => answers["passphase"],
|
||||
name: "passphase",
|
||||
type: "password",
|
||||
mask: "*"
|
||||
},
|
||||
{
|
||||
type: "confirm",
|
||||
name: "enableUpload",
|
||||
message: "Enable support to upload files?",
|
||||
default: false,
|
||||
}
|
||||
]);
|
||||
const { namespace, name, region, enableUpload } = ociPromps;
|
||||
if (ociPromps["authType"] === "preAuthentication") return {
|
||||
type: "oracleBucket", componentName, enableUpload,
|
||||
authConfig: {
|
||||
namespace, name, region
|
||||
}
|
||||
};
|
||||
const { fingerprint, privateKey, tenancy, user, passphase } = ociPromps;
|
||||
return {
|
||||
type: "oracleBucket", componentName, enableUpload,
|
||||
authConfig: {
|
||||
namespace, name, region,
|
||||
auth: {
|
||||
fingerprint, privateKey, tenancy, user, passphase
|
||||
}
|
||||
}
|
||||
};
|
||||
} else if (srcType === "docker") {
|
||||
const basicConfig = await inquirer.prompt<{authConfirm: boolean, imageURI: string}>([
|
||||
{
|
||||
name: "imageURI",
|
||||
type: "input",
|
||||
message: "Image URI/URL:",
|
||||
validate(input) {
|
||||
try {
|
||||
new dockerRegistry.parseImage(input);
|
||||
return true;
|
||||
} catch (err) {
|
||||
return String(err?.message || err);
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "authConfirm",
|
||||
type: "confirm",
|
||||
message: "This registry or image required authentication?",
|
||||
default: false
|
||||
}
|
||||
]);
|
||||
let auth: dockerRegistry.userAuth;
|
||||
let enableUpload: boolean = false;
|
||||
if (basicConfig.authConfirm) {
|
||||
const authPrompts = await inquirer.prompt([
|
||||
{
|
||||
name: "user",
|
||||
type: "input",
|
||||
message: "Username:",
|
||||
validate(input: string) {
|
||||
if (input.trim().length > 1) return true;
|
||||
return "Invalid username";
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "pass",
|
||||
type: "password",
|
||||
mask: "*",
|
||||
message: "Password or Token:"
|
||||
},
|
||||
{
|
||||
name: "enableUpload",
|
||||
type: "confirm",
|
||||
message: "Allow publish packages in Docker registry?"
|
||||
}
|
||||
]);
|
||||
enableUpload = authPrompts["enableUpload"];
|
||||
auth = {
|
||||
username: authPrompts.user,
|
||||
password: authPrompts.pass
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
type: "docker", componentName, enableUpload,
|
||||
image: basicConfig.imageURI,
|
||||
auth
|
||||
};
|
||||
}
|
||||
|
||||
console.log("Invalid select type!");
|
||||
return createSource();
|
||||
}
|
52
src/dpkg/README.md
Normal file
52
src/dpkg/README.md
Normal file
@ -0,0 +1,52 @@
|
||||
# Nodejs Debian maneger
|
||||
|
||||
this package supports some features that can be useful like extracting the Package files or even creating a Nodejs direct
|
||||
|
||||
## Supported features
|
||||
|
||||
### Dpkg
|
||||
|
||||
- Create package.
|
||||
- Extract package file (`ar` file).
|
||||
- Extract and descompress `data.tar`.
|
||||
|
||||
### Apt
|
||||
|
||||
- Get packages from repository `Packages` file.
|
||||
- Parse repository `source.list`.
|
||||
- Parse `Release` and `InRelease` file.
|
||||
|
||||
## Examples
|
||||
|
||||
É possivel criar pacotes direto de pacote:
|
||||
|
||||
```ts
|
||||
import { createWriteStream } from "node:fs";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { finished } from "node:stream/promises";
|
||||
import path from "node:path";
|
||||
import dpkg from "@sirherobrine23/debian";
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
const deb = dpkg.createPackage({
|
||||
dataFolder: path.join(__dirname, "debian_pack"),
|
||||
control: {
|
||||
Package: "test",
|
||||
Architecture: "all",
|
||||
Version: "1.1.1",
|
||||
Description: `Example to create fist line\n\nand Second`,
|
||||
Maintainer: {
|
||||
Name: "Matheus Sampaio Queiroga",
|
||||
Email: "srherobrine20@gmail.com"
|
||||
}
|
||||
},
|
||||
compress: {
|
||||
data: "gzip",
|
||||
control: "gzip",
|
||||
},
|
||||
scripts: {
|
||||
preinst: "#!/bin/bash\nset -ex\necho OK"
|
||||
}
|
||||
});
|
||||
await finished(deb.pipe(createWriteStream(path.join(__dirname, "example.deb"))));
|
||||
```
|
49
src/dpkg/package.json
Normal file
49
src/dpkg/package.json
Normal file
@ -0,0 +1,49 @@
|
||||
{
|
||||
"name": "@sirherobrine23/dpkg",
|
||||
"version": "3.7.0",
|
||||
"description": "Create and Parse debian packages files directly from Nodejs",
|
||||
"type": "module",
|
||||
"main": "src/index.js",
|
||||
"types": "src/index.d.ts",
|
||||
"author": "Matheus Sampaio Queiroga <srherobrine20@gmail.com>",
|
||||
"license": "GPL-2.0",
|
||||
"homepage": "https://git.sirherobrine23.org/Sirherobrine23/node-apt",
|
||||
"keywords": [
|
||||
"apt",
|
||||
"dpkg",
|
||||
"debian",
|
||||
"multi-platform",
|
||||
"windows",
|
||||
"linux",
|
||||
"macOS"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://git.sirherobrine23.org/Sirherobrine23/node-apt.git",
|
||||
"directory": "src/dpkg"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://git.sirherobrine23.org/Sirherobrine23/node-apt/issues"
|
||||
},
|
||||
"scripts": {
|
||||
"prepack": "tsc --build --clean && tsc --build",
|
||||
"postpack": "tsc --build --clean"
|
||||
},
|
||||
"dependencies": {
|
||||
"@sirherobrine23/ar": "3.7.0",
|
||||
"@sirherobrine23/decompress": "3.7.0",
|
||||
"@sirherobrine23/extends": "3.7.0",
|
||||
"@sirherobrine23/http": "3.7.0",
|
||||
"openpgp": "^5.11.0",
|
||||
"tar-stream": "^3.1.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tar-stream": "^3.1.3"
|
||||
},
|
||||
"workspaces": [
|
||||
"../ar",
|
||||
"../decompress",
|
||||
"../extends",
|
||||
"../http"
|
||||
]
|
||||
}
|
0
src/dpkg/src/apt.ts
Normal file
0
src/dpkg/src/apt.ts
Normal file
299
src/dpkg/src/dpkg.ts
Normal file
299
src/dpkg/src/dpkg.ts
Normal file
@ -0,0 +1,299 @@
|
||||
import path from "node:path";
|
||||
import fs from "node:fs";
|
||||
import stream from "node:stream";
|
||||
import stream_promise from "node:stream/promises";
|
||||
import * as nodeAr from "@sirherobrine23/ar";
|
||||
import { Compressors } from "@sirherobrine23/decompress";
|
||||
|
||||
/** Debian packages, get from `dpkg-architecture --list -L | grep 'musl-linux-' | sed 's|musl-linux-||g' | xargs`, version 1.21.1, Ubuntu */
|
||||
export type debianArch = "all"|"armhf"|"armel"|"mipsn32"|"mipsn32el"|"mipsn32r6"|"mipsn32r6el"|"mips64"|"mips64el"|"mips64r6"|"mips64r6el"|"powerpcspe"|"x32"|"arm64ilp32"|"i386"|"ia64"|"alpha"|"amd64"|"arc"|"armeb"|"arm"|"arm64"|"avr32"|"hppa"|"m32r"|"m68k"|"mips"|"mipsel"|"mipsr6"|"mipsr6el"|"nios2"|"or1k"|"powerpc"|"powerpcel"|"ppc64"|"ppc64el"|"riscv64"|"s390"|"s390x"|"sh3"|"sh3eb"|"sh4"|"sh4eb"|"sparc"|"sparc64"|"tilegx";
|
||||
export const archs: Readonly<debianArch[]> = Object.freeze(["all", "armhf", "armel", "mipsn32", "mipsn32el", "mipsn32r6", "mipsn32r6el", "mips64", "mips64el", "mips64r6", "mips64r6el", "powerpcspe", "x32", "arm64ilp32", "i386", "ia64", "alpha", "amd64", "arc", "armeb", "arm", "arm64", "avr32", "hppa", "m32r", "m68k", "mips", "mipsel", "mipsr6", "mipsr6el", "nios2", "or1k", "powerpc", "powerpcel", "ppc64", "ppc64el", "riscv64", "s390", "s390x", "sh3", "sh3eb", "sh4", "sh4eb", "sparc", "sparc64", "tilegx"]);
|
||||
|
||||
function keys<T>(arg0: T): (keyof T)[] { return Object.keys(arg0) as any[]; }
|
||||
|
||||
type Dependencie = [string]|
|
||||
[string, string]|
|
||||
[string, "|"|"<<"|"<="|"="|">="|">>", string];
|
||||
|
||||
/** Debian binary control */
|
||||
export interface DebianControl {
|
||||
/** Package name */
|
||||
Package: string;
|
||||
/**
|
||||
* The version number of a package. The format is: `[epoch:]upstream_version[-debian_revision]`.
|
||||
*
|
||||
* The three components here are:
|
||||
*
|
||||
* - **epoch**
|
||||
*
|
||||
* This is a single (generally small) unsigned integer. It may be omitted, in which case zero is assumed.
|
||||
*
|
||||
* Epochs can help when the upstream version numbering scheme changes, but they must be used with care. You should not change the epoch, even in experimental, without getting consensus on debian-devel first.
|
||||
*
|
||||
* - **upstream_version**
|
||||
*
|
||||
* This is the main part of the version number. It is usually the version number of the original (“upstream”) package from which the .deb file has been made, if this is applicable. Usually this will be in the same format as that specified by the upstream author(s); however, it may need to be reformatted to fit into the package management system’s format and comparison scheme.
|
||||
*
|
||||
* The comparison behavior of the package management system with respect to the upstream_version is described below. The upstream_version portion of the version number is mandatory.
|
||||
*
|
||||
* The upstream_version must contain only alphanumerics 6 and the characters . + - ~ (full stop, plus, hyphen, tilde) and should start with a digit. If there is no debian_revision then hyphens are not allowed.
|
||||
*
|
||||
* - **debian_revision**
|
||||
*
|
||||
* This part of the version number specifies the version of the Debian package based on the upstream version. It must contain only alphanumerics and the characters + . ~ (plus, full stop, tilde) and is compared in the same way as the upstream_version is.
|
||||
*
|
||||
* It is conventional to restart the debian_revision at 1 each time the upstream_version is increased.
|
||||
*
|
||||
* The package management system will break the version number apart at the last hyphen in the string (if there is one) to determine the upstream_version and debian_revision. The absence of a debian_revision is equivalent to a debian_revision of 0.
|
||||
*
|
||||
* Presence of the debian_revision part indicates this package is a non-native package (see Source packages). Absence indicates the package is a native package.
|
||||
*
|
||||
* When comparing two version numbers, first the epoch of each are compared, then the upstream_version if epoch is equal, and then debian_revision if upstream_version is also equal. epoch is compared numerically. The upstream_version and debian_revision parts are compared by the package management system using the following algorithm:
|
||||
*
|
||||
* The strings are compared from left to right.
|
||||
*
|
||||
* First the initial part of each string consisting entirely of non-digit characters is determined. These two parts (one of which may be empty) are compared lexically. If a difference is found it is returned. The lexical comparison is a comparison of ASCII values modified so that all the letters sort earlier than all the non-letters and so that a tilde sorts before anything, even the end of a part. For example, the following parts are in sorted order from earliest to latest: ~~, ~~a, ~, the empty part, a. 7
|
||||
*
|
||||
* Then the initial part of the remainder of each string which consists entirely of digit characters is determined. The numerical values of these two parts are compared, and any difference found is returned as the result of the comparison. For these purposes an empty string (which can only occur at the end of one or both version strings being compared) counts as zero.
|
||||
*
|
||||
* These two steps (comparing and removing initial non-digit strings and initial digit strings) are repeated until a difference is found or both strings are exhausted.
|
||||
*/
|
||||
Version: string;
|
||||
/**
|
||||
* The package maintainer’s name and email address. The name must come first, then the email address inside angle brackets `<>` (in RFC822 format).
|
||||
*
|
||||
* If the maintainer’s name contains a full stop then the whole field will not work directly as an email address due to a misfeature in the syntax specified in RFC822; a program using this field as an address must check for this and correct the problem if necessary (for example by putting the name in round brackets and moving it to the end, and bringing the email address forward).
|
||||
*/
|
||||
Maintainer: string;
|
||||
/**
|
||||
* A description of the binary package, consisting of two parts, the synopsis or the short description, and the long description. It is a multiline field with the following format:
|
||||
*
|
||||
* ```txt
|
||||
* single line synopsis
|
||||
* extended description over several lines
|
||||
* ```
|
||||
*/
|
||||
Description: string;
|
||||
/** This field identifies the source package name. */
|
||||
Source?: string;
|
||||
Section?: string;
|
||||
Priority?: string;
|
||||
Architecture: debianArch;
|
||||
Essential?: boolean;
|
||||
"Installed-Size"?: number|bigint;
|
||||
Homepage?: string;
|
||||
|
||||
/**
|
||||
* Some binary packages incorporate parts of other packages when built but do not have to depend on those packages. Examples include linking with static libraries or incorporating source code from another package during the build. In this case, the source packages of those other packages are part of the complete source (the binary package is not reproducible without them).
|
||||
*
|
||||
* When the license of either the incorporated parts or the incorporating binary package requires that the full source code of the incorporating binary package be made available, the Built-Using field must list the corresponding source package for any affected binary package incorporated during the build, 6 including an “exactly equal” (“=”) version relation on the version that was used to build that version of the incorporating binary package. 7
|
||||
*
|
||||
* This causes the Debian archive to retain the versions of the source packages that were actually incorporated. In particular, if the versions of the incorporated parts are updated but the incorporating binary package is not rebuilt, the older versions of the incorporated parts will remain in the archive in order to satisfy the license.
|
||||
*/
|
||||
"Built-Using"?: string[];
|
||||
|
||||
/**
|
||||
* This declares an absolute dependency. A package will not be configured unless all of the packages listed in its Depends field have been correctly configured (unless there is a circular dependency as described above).
|
||||
*
|
||||
* The Depends field should be used if the depended-on package is required for the depending package to provide a significant amount of functionality.
|
||||
*
|
||||
* The Depends field should also be used if the postinst or prerm scripts require the depended-on package to be unpacked or configured in order to run. In the case of postinst configure, the depended-on packages will be unpacked and configured first. (If both packages are involved in a dependency loop, this might not work as expected; see the explanation a few paragraphs back.) In the case of prerm or other postinst actions, the package dependencies will normally be at least unpacked, but they may be only “Half-Installed” if a previous upgrade of the dependency failed.
|
||||
*
|
||||
* Finally, the Depends field should be used if the depended-on package is needed by the postrm script to fully clean up after the package removal. There is no guarantee that package dependencies will be available when postrm is run, but the depended-on package is more likely to be available if the package declares a dependency (particularly in the case of postrm remove). The postrm script must gracefully skip actions that require a dependency if that dependency isn’t available.
|
||||
*/
|
||||
Depends?: Dependencie[];
|
||||
/**
|
||||
* This field is like Depends, except that it also forces dpkg to complete installation of the packages named before even starting the installation of the package which declares the pre-dependency, as follows:
|
||||
*
|
||||
* When a package declaring a pre-dependency is about to be unpacked the pre-dependency can be satisfied if the depended-on package is either fully configured, or even if the depended-on package(s) are only in the “Unpacked” or the “Half-Configured” state, provided that they have been configured correctly at some point in the past (and not removed or partially removed since). In this case, both the previously-configured and currently “Unpacked” or “Half-Configured” versions must satisfy any version clause in the Pre-Depends field.
|
||||
*
|
||||
* When the package declaring a pre-dependency is about to be configured, the pre-dependency will be treated as a normal Depends. It will be considered satisfied only if the depended-on package has been correctly configured. However, unlike with Depends, Pre-Depends does not permit circular dependencies to be broken. If a circular dependency is encountered while attempting to honor Pre-Depends, the installation will be aborted.
|
||||
*
|
||||
* Pre-Depends are also required if the preinst script depends on the named package. It is best to avoid this situation if possible.
|
||||
*
|
||||
* Pre-Depends should be used sparingly, preferably only by packages whose premature upgrade or installation would hamper the ability of the system to continue with any upgrade that might be in progress.
|
||||
*
|
||||
* You should not specify a Pre-Depends entry for a package before this has been discussed on the debian-devel mailing list and a consensus about doing that has been reached. See Dependencies.
|
||||
*/
|
||||
"Pre-Depends"?: Dependencie[];
|
||||
/**
|
||||
* This declares a strong, but not absolute, dependency.
|
||||
*
|
||||
* The Recommends field should list packages that would be found together with this one in all but unusual installations.
|
||||
*/
|
||||
Recommends?: Dependencie[];
|
||||
/** This is used to declare that one package may be more useful with one or more others. Using this field tells the packaging system and the user that the listed packages are related to this one and can perhaps enhance its usefulness, but that installing this one without them is perfectly reasonable. */
|
||||
Suggests?: Dependencie[];
|
||||
/** This field is similar to Suggests but works in the opposite direction. It is used to declare that a package can enhance the functionality of another package. */
|
||||
Enhances?: Dependencie[];
|
||||
/** Breaks is described in Packages which break other packages - Breaks */
|
||||
Breaks?: Dependencie[];
|
||||
/** Conflicts is described in Conflicting binary packages - Conflicts. The rest are described below. */
|
||||
Conflicts?: Dependencie[];
|
||||
|
||||
"Original-Maintainer"?: string;
|
||||
Bugs?: string;
|
||||
Task?: string;
|
||||
Tags?: string[];
|
||||
};
|
||||
|
||||
/** Packages file control */
export interface DebianControlPackages extends DebianControl {
  // Path of the .deb file, relative to the repository root (the "Filename" field of a Packages index entry).
  Filename: string;
  // Size of the .deb file in bytes.
  Size: number|bigint;
  // MD5 checksum of the .deb file; used elsewhere in this project as the package lookup key.
  MD5sum: string;

  Origin?: string;
  // Optional stronger checksums of the .deb file.
  SHA1?: string;
  SHA256?: string;
  SHA512?: string;
}
|
||||
|
||||
/**
|
||||
* Create control file
|
||||
* @param config - Package config
|
||||
*/
|
||||
export function createControl(config: DebianControl|DebianControlPackages): string {
|
||||
if (!config.Package || typeof config.Package !== "string") throw new TypeError("Set package name!");
|
||||
else if (config.Package.trim().toLowerCase() !== config.Package || !((/^[a-z0-9][a-z0-9\.\+\-]+$/).test(config.Package))) throw new TypeError("Set valid package name!");
|
||||
else if (!(archs.includes(config.Architecture))) throw new TypeError("Invalid arch");
|
||||
else if (!config.Description || typeof config.Description !== "string" || config.Description.length <= 1) throw new TypeError("Set package Description");
|
||||
const isDep = (arg: any): arg is "Depends" => ([ "Depends", "Pre-Depends", "Recommends", "Suggests", "Enhances", "Breaks", "Conflicts" ]).includes(arg);
|
||||
const configBuff: string[] = [];
|
||||
keys(config).forEach(keyName => {
|
||||
if (([undefined, null, ""]).includes(config[keyName] as any)) return;
|
||||
let line = String().concat(keyName, ": ");
|
||||
const data = config[keyName];
|
||||
if (keyName === "Description") {
|
||||
let [ syno, ...extend ] = config[keyName].trim().split("\n");
|
||||
extend = extend.join("\n").trim().split("\n");
|
||||
line = line.concat(syno.trim(), "\n", extend.map(s => {
|
||||
if ((["", "."]).includes((s = s.trimEnd()))) return " .";
|
||||
return String().concat(" ", s);
|
||||
}).join("\n")).trim();
|
||||
} else if (isDep(keyName)) line = line.concat(config[keyName].map(s => s.length === 1 ? s[0] : (s.length === 2 ? `${s[0]} [${s[1]}]` : (s[1] === "|" ? `${s[0]} | ${s[2]}` : `${s[0]} (${s[1]} ${s[2]})`))).join(", "));
|
||||
else if (typeof data === "boolean") line = line.concat(data ? "yes" : "no");
|
||||
else if (Array.isArray(data)) line = line.concat(data.join(", "));
|
||||
else if (typeof data === "bigint") line = data.toString().endsWith("n") ? line.concat(data.toString().slice(0, -1)) : line.concat(data.toString());
|
||||
else line = line.concat(String(data));
|
||||
configBuff.push(line);
|
||||
});
|
||||
|
||||
return configBuff.join("\n");
|
||||
}
|
||||
|
||||
function includes(str: string, sear: string|string[]): boolean {
|
||||
if (Array.isArray(sear)) return sear.some(s => str.indexOf(s) != -1);
|
||||
return str.indexOf(sear) != -1;
|
||||
}
|
||||
|
||||
/**
 * Parse control file
 *
 * Accepts the raw text (or Buffer) of a Debian control/Packages stanza and
 * returns it as a DebianControl object. Continuation lines (starting with a
 * space or tab) are folded into the previous field; dependency fields are
 * split into tuples; the Description field has its leading indent stripped.
 */
export function parseControl(control: string|Buffer): DebianControl {
  // Obj accumulates [fieldName, rawValue] pairs in file order.
  // isDep narrows the field names whose values hold dependency lists.
  const Obj: [keyof DebianControlPackages, string][] = [], isDep = (arg: any): arg is "Depends" => ([ "Depends", "Pre-Depends", "Recommends", "Suggests", "Enhances", "Breaks", "Conflicts" ]).includes(arg);
  if (Buffer.isBuffer(control)) control = control.toString("utf8");
  const lines = control.trim().split("\n");
  while (lines.length > 0) {
    const line = lines.shift().trimEnd();
    // A line starting with whitespace continues the previous field's value.
    // NOTE(review): Obj.at(-1) is undefined if the very first line is a
    // continuation — that input would throw here.
    if ((["\t", " "]).includes(line[0])) {
      Obj.at(-1)[1] = Obj.at(-1)[1].concat("\n", line);
    } else if (line.indexOf(":") >= 1) {
      // "Field: value" — split on the first colon only.
      const sp = line.indexOf(":");
      Obj.push([ line.slice(0, sp) as any, line.slice(sp+1).trim() ]);
    // No colon and no leading whitespace: treat as continuation anyway.
    } else Obj.at(-1)[1] = Obj.at(-1)[1].concat("\n", line);
  }

  return Obj.reduce<DebianControl>((acc, [ key, value ]) => {
    if (key === "Description") {
      const [ syno, ...extend ] = value.split("\n");
      // Smallest leading-space width (>= 1) among the extended lines; used to
      // strip the uniform indent added when the field was serialized.
      const spaceTrim = Array.from(new Set(extend.map(s => s.length - s.trimStart().length).sort())).filter(i => i >= 1).at(0);
      for (const line in extend) {
        // A lone "." marks a blank line in the extended description.
        if (extend[line].trim() === ".") extend[line] = "";
        else if (extend[line].slice(0, spaceTrim).trim() === "") extend[line] = extend[line].slice(spaceTrim).trimEnd();
      }
      acc["Description"] = String().concat(syno.trim(), "\n", extend.join("\n")).trim();
    } else if (isDep(key)) {
      acc[key] = [];
      // Dependencies are comma-separated; each entry becomes a tuple.
      for (const val of value.split(",").map(s => s.trim())) {
        if (val.indexOf("|") > 0) {
          // Alternative dependency: "a | b" -> [a, "|", b].
          const or = val.indexOf("|");
          acc[key].push([
            val.slice(0, or).trim(),
            "|",
            val.slice(or+1).trim()
          ]);
        } else if (includes(val, "[")) {
          // Architecture restriction: "pkg [arch]" -> [pkg, arch].
          const a1 = val.indexOf("["), a2 = val.indexOf("]");
          const insider = val.slice(a1+1, a2).trim();
          acc[key].push([ val.slice(0, a1).trim(), insider ]);
        } else if (includes(val, ["<<", "<=", "=", ">=", ">>"])) {
          // Version relation: "pkg (op version)" -> [pkg, op, version].
          // NOTE(review): insider.slice(0, 2) assumes a two-char operator;
          // a bare "=" relation yields "= x".slice(0,2) = "= " -> trimmed "=",
          // but the version then loses its first char check — confirm with "=".
          const a1 = val.indexOf("("), a2 = val.indexOf(")");
          const insider = val.slice(a1+1, a2).trim();
          acc[key].push([
            val.slice(0, a1).trim(),
            insider.slice(0, 2).trim() as any,
            insider.slice(2).trim(),
          ]);
        } else acc[key].push([ val ]);
      }
    } else acc[key] = value;
    return acc;
  }, Object.create({}));
}
|
||||
|
||||
export interface Package {
  /** Package control */
  control: DebianControl;

  /** Compress tarballs files */
  compress?: {
    /**
     * Compress the data.tar tar to the smallest possible size
     */
    data?: Exclude<Compressors, "deflate">;
    /**
     * compress control file to the smallest file possible
     */
    control?: Exclude<Compressors, "zst"|"deflate">;
  };

  /**
   * Scripts or binary files to run post or pre action
   *
   * If set file path load directly
  @example {
    "preinst": "#!/bin/bash\nset -ex\necho \"Ok Google\"",
    "postinst": "/var/lib/example/removeMicrosoft.sh"
  }
  */
  scripts?: {
    /** Run script before install packages */
    preinst?: string;
    /** Run script before remove package */
    prerm?: string;
    /** After install package */
    postinst?: string;
    /** After package removed */
    postrm?: string;
  };
  /**
   * Add files to data tar
   *
   * The implementation calls `callback` once per file with its stats and a
   * readable stream, and with `done === true` when there are no more files.
   * NOTE(review): inferred from the callback signature and the WIP call site
   * below — confirm against the final createPackage implementation.
   */
  dataFiles(callback: (done: boolean, fileInfo?: fs.BigIntStats|fs.Stats, fileStream?: stream.Readable) => void): void;
}
|
||||
|
||||
// TODO(review): WIP stub from the dpkg migration — body not implemented yet.
export function createPackage(config: Package) {}
|
||||
createPackage({
|
||||
compress: { control: "gzip", data: "gzip" },
|
||||
control: {
|
||||
Package: "google",
|
||||
Version: "1.1.1",
|
||||
Architecture: "all",
|
||||
Description: "google",
|
||||
Maintainer: "Matheus Sampaio Queiroga <srherobrine20@gmail.com>",
|
||||
},
|
||||
dataFiles(callback) {
|
||||
callback()
|
||||
},
|
||||
})
|
0
src/dpkg/src/index.ts
Normal file
0
src/dpkg/src/index.ts
Normal file
3
src/dpkg/tsconfig.json
Normal file
3
src/dpkg/tsconfig.json
Normal file
@ -0,0 +1,3 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json"
|
||||
}
|
445
src/index.ts
445
src/index.ts
@ -1,445 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import "./log.js";
|
||||
import path from "node:path";
|
||||
import yargs from "yargs";
|
||||
import crypto from "node:crypto";
|
||||
import cluster from "node:cluster";
|
||||
import packages from "./packages.js";
|
||||
import express from "express";
|
||||
import expressRate from "express-rate-limit";
|
||||
import streamPromise from "node:stream/promises";
|
||||
import configManeger from "./configManeger.js";
|
||||
import * as Debian from "@sirherobrine23/dpkg";
|
||||
import oldFs, { createReadStream, promises as fs } from "node:fs";
|
||||
import { aptStreamConfig } from "./config.js";
|
||||
import { dockerRegistry } from "@sirherobrine23/docker-registry";
|
||||
import { extendsFS } from "@sirherobrine23/extends";
|
||||
import { dpkg } from "@sirherobrine23/dpkg";
|
||||
|
||||
// Set yargs config
|
||||
const terminalSize = typeof process.stdout.getWindowSize === "function" ? process.stdout.getWindowSize()[0] : null;
|
||||
yargs(process.argv.slice(2)).wrap(terminalSize).version(false).help(true).alias("h", "help").strictCommands().demandCommand()
|
||||
|
||||
// Edit/print configs interactive mode
|
||||
.command(["config", "maneger", "$0"], "Maneger config", yargs => yargs.option("config", {
|
||||
string: true,
|
||||
alias: "c",
|
||||
type: "string",
|
||||
description: "Config file path",
|
||||
default: "aptStream.yml",
|
||||
}).option("print", {
|
||||
description: "print config in stdout and select targets to print. Default is yaml",
|
||||
alias: "p",
|
||||
array: false,
|
||||
string: true,
|
||||
choices: [
|
||||
"", // if set only "--print"
|
||||
"yaml", "yml", "json", // without encode
|
||||
"yaml64", "yml64", "json64", // Encode in base64
|
||||
"yamlhex", "ymlhex", "jsonhex", // encode in hexadecimal (hex)
|
||||
],
|
||||
}), async options => {
|
||||
if (options.print !== undefined) {
|
||||
let out = String(options.print);
|
||||
if (typeof options.print === "boolean"||options.print === "") out = "yaml";
|
||||
const config = new aptStreamConfig(options.config);
|
||||
const target = out.startsWith("json") ? "json" : "yaml", encode = out.endsWith("64") ? "base64" : out.endsWith("hex") ? "hex" : "utf8";
|
||||
return console.log((config.toString(encode, target)));
|
||||
}
|
||||
if (!process.stdin.isTTY) throw new Error("Run with TTY to maneger config!");
|
||||
return configManeger(options.config);
|
||||
})
|
||||
|
||||
// Sync repository packages
|
||||
.command(["sync", "synchronize"], "Sync packges directly from CLI", yargs => yargs.option("config", {
|
||||
string: true,
|
||||
alias: "c",
|
||||
type: "string",
|
||||
description: "Config file path",
|
||||
default: "aptStream.yml",
|
||||
}).option("verbose", {
|
||||
type: "boolean",
|
||||
boolean: true,
|
||||
description: "Enable verbose errors",
|
||||
default: false,
|
||||
alias: ["v", "vv", "dd"]
|
||||
}), async options => {
|
||||
console.log("Starting...");
|
||||
const packageManeger = await packages(options.config);
|
||||
let i = 0;
|
||||
await packageManeger.syncRepositorys((err, db) => {
|
||||
process.stdout.moveCursor(0, -1);
|
||||
console.log("Packages loaded %f", i++);
|
||||
if (!!err) {
|
||||
if (options.verbose) return console.error(err);
|
||||
return console.error(err.message || err);
|
||||
}
|
||||
console.log("Added %s: %s/%s (%s)", db.repositoryID, db.controlFile.Package, db.controlFile.Architecture, db.controlFile.Version);
|
||||
});
|
||||
console.log("End!");
|
||||
return packageManeger.close();
|
||||
})
|
||||
|
||||
// Pack debian package
|
||||
.command(["pack", "pack-deb", "create", "c"], "Create package", yargs => yargs.option("package-path", {
|
||||
type: "string",
|
||||
string: true,
|
||||
alias: "s",
|
||||
default: process.cwd(),
|
||||
description: "Debian package source",
|
||||
}).option("output", {
|
||||
type: "string",
|
||||
string: true,
|
||||
alias: "o",
|
||||
}).option("compress", {
|
||||
type: "string",
|
||||
string: true,
|
||||
alias: [
|
||||
"data-compress",
|
||||
"c"
|
||||
],
|
||||
description: "data.tar compress file",
|
||||
default: "gzip",
|
||||
choices: [
|
||||
"passThrough",
|
||||
"gzip",
|
||||
"zst",
|
||||
"xz",
|
||||
]
|
||||
}).option("control-compress", {
|
||||
type: "string",
|
||||
string: true,
|
||||
description: "control.tar compress file",
|
||||
alias: [
|
||||
"d"
|
||||
],
|
||||
default: "gzip",
|
||||
choices: [
|
||||
"gzip",
|
||||
"passThrough",
|
||||
"xz"
|
||||
]
|
||||
}), async options => {
|
||||
let debianConfig: string;
|
||||
if (!(await extendsFS.exists(debianConfig = path.resolve(process.cwd(), options.packagePath, "DEBIAN"))||await extendsFS.exists(debianConfig = path.resolve(process.cwd(), options.packagePath, "debian")))) throw new Error("Create valid package Structure!");
|
||||
if (!(await extendsFS.exists(path.join(debianConfig, "control")))) throw new Error("Require control file");
|
||||
const control = dpkg.parseControl(await fs.readFile(path.join(debianConfig, "control")));
|
||||
if (!options.output) options.output = path.join(process.cwd(), `${control.Package}_${control.Architecture}_${control.Version}.deb`); else options.output = path.resolve(process.cwd(), options.output);
|
||||
const scriptsFile = (await fs.readdir(debianConfig)).filter(file => (["preinst", "prerm", "postinst", "postrm"]).includes(file));
|
||||
|
||||
console.log("Creating debian package");
|
||||
await streamPromise.finished(dpkg.createPackage({
|
||||
control,
|
||||
dataFolder: path.resolve(debianConfig, ".."),
|
||||
compress: {
|
||||
data: options.compress as any||"gzip",
|
||||
control: options.controlCompress as any||"gzip",
|
||||
},
|
||||
scripts: scriptsFile.reduce<dpkg.packageConfig["scripts"]>((acc, file) => {acc[file] = path.join(debianConfig, file); return acc;}, {})
|
||||
}).pipe(oldFs.createWriteStream(options.output)));
|
||||
console.log("File saved %O", options.output);
|
||||
})
|
||||
|
||||
// Upload to registry
|
||||
.command(["upload", "u"], "Upload package to repoitory allow uploads", yargs => yargs.strictCommands(false).option("config", {
|
||||
string: true,
|
||||
alias: "c",
|
||||
type: "string",
|
||||
description: "Config file path",
|
||||
default: "aptStream.yml",
|
||||
}).option("repositoryID", {
|
||||
type: "string",
|
||||
string: true,
|
||||
alias: ["repoID", "id", "i"],
|
||||
demandOption: true,
|
||||
description: "Repository to upload files"
|
||||
}).option("tag", {
|
||||
type: "string",
|
||||
string: true,
|
||||
description: "Docker/Github release tag name",
|
||||
alias: ["dockerTAG", "ociTAG", "oci_tag", "release_tag"]
|
||||
}), async options => {
|
||||
const files = options._.slice(1).map((file: string) => path.resolve(process.cwd(), file));
|
||||
if (!files.length) throw new Error("Required one file to Upload");
|
||||
const config = new aptStreamConfig(options.config);
|
||||
if (!(config.getRepository(options.repositoryID).get(options.repositoryID)).enableUpload) throw new Error("Repository not support upload file!");
|
||||
const up = await config.getRepository(options.repositoryID).uploadFile(options.repositoryID);
|
||||
if (up.githubUpload) {
|
||||
for (const filePath of files) {
|
||||
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
|
||||
const stats = await fs.lstat(filePath);
|
||||
const filename = path.basename(filePath);
|
||||
await streamPromise.finished(createReadStream(filePath).pipe(await up.githubUpload(filename, stats.size, options.tag)));
|
||||
}
|
||||
} else if (up.gdriveUpload) {
|
||||
for (const filePath of files) {
|
||||
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
|
||||
const filename = path.basename(filePath);
|
||||
await streamPromise.finished(createReadStream(filePath).pipe(await up.gdriveUpload(filename)));
|
||||
}
|
||||
} else if (up.ociUpload) {
|
||||
for (const filePath of files) {
|
||||
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
|
||||
const filename = path.basename(filePath);
|
||||
await streamPromise.finished(createReadStream(filePath).pipe(await up.ociUpload(filename)));
|
||||
}
|
||||
} else if (up.dockerUpload) {
|
||||
for (const filePath of files) {
|
||||
if (!(await extendsFS.exists(filePath))) {console.error("%O not exsists!"); continue;}
|
||||
const { controlFile } = await dpkg.parsePackage(createReadStream(filePath));
|
||||
const filename = path.basename(filePath);
|
||||
const tr = await up.dockerUpload(dockerRegistry.debianArchToDockerPlatform(controlFile.Architecture));
|
||||
tr.annotations.set("org.opencontainers.image.description", controlFile.Description);
|
||||
tr.annotations.set("org.opencontainers.image.version", controlFile.Version);
|
||||
tr.annotations.set("org.sirherobrine23.aptstream.control", JSON.stringify(controlFile));
|
||||
tr.annotations.set("com.github.package.type", "aptstream_package");
|
||||
await streamPromise.finished(createReadStream(filePath).pipe(tr.addEntry({
|
||||
name: filename,
|
||||
type: "file",
|
||||
size: (await fs.lstat(filePath)).size
|
||||
})));
|
||||
const img_info = await tr.finalize(options.tag||controlFile.Version);
|
||||
console.log("Image digest: %O", img_info.digest);
|
||||
}
|
||||
}
|
||||
await config.saveConfig().catch(() => {});
|
||||
})
|
||||
|
||||
// APT Server
|
||||
.command(["server", "serve", "s"], "Run http Server", yargs => yargs.option("config", {
|
||||
string: true,
|
||||
alias: "c",
|
||||
type: "string",
|
||||
description: "Config file path",
|
||||
default: "aptStream.yml"
|
||||
}).option("port", {
|
||||
number: true,
|
||||
alias: "p",
|
||||
type: "number",
|
||||
description: "Alternative port to Run http server"
|
||||
}).option("cluster", {
|
||||
number: true,
|
||||
type: "number",
|
||||
description: "Enable cluster mode for perfomace",
|
||||
alias: "t"
|
||||
}).option("data", {
|
||||
string: true,
|
||||
alias: "C",
|
||||
type: "string",
|
||||
description: "data files"
|
||||
}).option("db", {
|
||||
string: true,
|
||||
type: "string",
|
||||
alias: "d",
|
||||
description: "database url"
|
||||
}).option("auto-sync", {
|
||||
type: "boolean",
|
||||
boolean: true,
|
||||
alias: "z",
|
||||
default: false,
|
||||
description: "Enable backgroud sync packages"
|
||||
}).option("disable-release-compress", {
|
||||
type: "boolean",
|
||||
boolean: true,
|
||||
default: false,
|
||||
description: "Disable Release generate Packages.gz and Packages.gz to calculate hash",
|
||||
alias: "L"
|
||||
}), async options => {
|
||||
let packageManegerInit = new aptStreamConfig(options.config);
|
||||
if (!!options.data) packageManegerInit.setDataStorage(options.data);
|
||||
if (!!options.port) packageManegerInit.setPortListen(options.port);
|
||||
if (!!options.db) packageManegerInit.setDatabse(options.db);
|
||||
if (!!options["disable-release-compress"]) packageManegerInit.setCompressRelease("gzip", false).setCompressRelease("xz", false);
|
||||
if (!!options.cluster && options.cluster > 0) packageManegerInit.setClusterForks(options.cluster);
|
||||
const packageManeger = await packages(packageManegerInit);
|
||||
let forks = packageManeger.getClusterForks();
|
||||
if (cluster.isPrimary) {
|
||||
if (!!(options.autoSync ?? options["auto-sync"])) (async () => {
|
||||
while (true) {
|
||||
console.info("Initing package sync!");
|
||||
await packageManeger.syncRepositorys((_err, db) => {
|
||||
if (!db) return;
|
||||
const {repositoryID, controlFile: { Package, Architecture, Version }} = db;
|
||||
console.log("Sync/Add: %s -> %s %s/%s (%s)", repositoryID, Package, Architecture, Version)
|
||||
});
|
||||
console.log("Next sync after 30 Minutes");
|
||||
await new Promise(done => setTimeout(done, 1800000));
|
||||
}
|
||||
})().catch(err => {
|
||||
console.info("Auto sync packages disabled!");
|
||||
console.error(err);
|
||||
});
|
||||
if (forks > 0) {
|
||||
const forkProcess = async (count = 0): Promise<number> => new Promise((done, reject) => {
|
||||
const fk = cluster.fork();
|
||||
return fk.on("error", err => {
|
||||
console.error(err);
|
||||
return reject(err);
|
||||
}).on("online", () => done(fk.id)).once("exit", (code, signal) => {
|
||||
count++;
|
||||
if (!signal && code === 0) return console.info("Cluster %s: exited and not restarting", fk.id);
|
||||
else if (count > 5) return console.warn("Cluster get max count retrys!");
|
||||
console.info("Cluster %s: Catch %O, and restating with this is restating count %f", fk.id, code||signal, count);
|
||||
return forkProcess(count);
|
||||
});
|
||||
});
|
||||
for (let i = 0; i < forks; i++) await forkProcess().then(id => console.info("Cluster %s is online", id));
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Serve
|
||||
const app = express();
|
||||
app.disable("x-powered-by").disable("etag");
|
||||
app.use(express.json(), (_req, res, next) => {
|
||||
res.setHeader("cluster-id", String(cluster.isPrimary ? 1 : cluster.worker.id));
|
||||
res.json = (body) => res.setHeader("Content-Type", "application/json").send(JSON.stringify(body, null, 2)); return next();
|
||||
});
|
||||
|
||||
// Serve info
|
||||
app.get("/", async ({res}) => {
|
||||
return res.json({
|
||||
cluster: cluster.worker?.id ?? 1,
|
||||
sourcesCount: packageManeger.getRepositorys().length,
|
||||
packagesRegistred: await packageManeger.packagesCount(),
|
||||
db: packageManeger.getClientInfo(),
|
||||
});
|
||||
});
|
||||
|
||||
// Public key
|
||||
app.get("/public(_key|)(|.gpg|.dearmor)", async (req, res) => res.setHeader("Content-Type", req.path.endsWith(".dearmor") ? "octect/stream" : "text/plain").send(await packageManeger.getPublicKey(req.path.endsWith(".dearmor") ? "dearmor" : "armor")));
|
||||
|
||||
// Get dists
|
||||
app.get("/dists", async ({res}) => res.json(Array.from(new Set(packageManeger.getRepositorys().map(d => d.repositoryName)))));
|
||||
app.get("/dists/:distName/info", async (req, res) => res.json(await packageManeger.repoInfo(req.params.distName)));
|
||||
app.get("/dists/(:distName)(|/InRelease|/Release(.gpg)?)?", async (req, res) => {
|
||||
const lowerPath = req.path.toLowerCase(), aptRoot = path.posix.resolve("/", path.posix.join(req.baseUrl, req.path), "../../../..");
|
||||
let Release = await packageManeger.createRelease(req.params["distName"], aptRoot);
|
||||
let releaseText: string;
|
||||
if (lowerPath.endsWith("inrelease")||lowerPath.endsWith("release.gpg")) releaseText = await Release.inRelease(req.path.endsWith(".gpg") ? "clearMessage" : "sign");
|
||||
else if (lowerPath.endsWith("release")) releaseText = Release.toString();
|
||||
else return res.json(Release.toJSON());
|
||||
return res.status(200).setHeader("Content-Type", "text/plain").setHeader("Content-Length", String(Buffer.byteLength(releaseText))).send(releaseText);
|
||||
});
|
||||
|
||||
app.get("/dists/:distName/:componentName/binary-:Arch/Packages(.(gz|xz))?", async (req, res) => {
|
||||
const { distName, componentName, Arch } = req.params;
|
||||
const reqPath = req.path;
|
||||
return packageManeger.createPackage(distName, componentName, Arch, path.posix.resolve("/", path.posix.join(req.baseUrl, req.path), "../../../../../.."), {
|
||||
compress: reqPath.endsWith(".gz") ? "gz" : reqPath.endsWith(".xz") ? "xz" : undefined,
|
||||
callback: (str) => str.pipe(res.writeHead(200, {}))
|
||||
});
|
||||
});
|
||||
|
||||
// Send package hashs
|
||||
app.get("/pool", async ({res}) => res.json(await packageManeger.getPackagesHash()));
|
||||
|
||||
app.get("/pool/(:hash)(|/data.tar|.deb)", async (req, res) => {
|
||||
const packageID = (await packageManeger.pkgQuery({"controlFile.MD5sum": req.params.hash})).at(0);
|
||||
if (!packageID) return res.status(404).json({error: "Package not exist"});
|
||||
if (req.path.endsWith("/data.tar")||req.path.endsWith(".deb")) {
|
||||
const str = await packageManeger.getPackageStream(packageID);
|
||||
if (req.path.endsWith(".deb")) return str.pipe(res.writeHead(200, {}));
|
||||
return (await Debian.getPackageData(str)).pipe(res.writeHead(200, {}));
|
||||
}
|
||||
return res.json({...packageID.controlFile, Filename: undefined});
|
||||
});
|
||||
|
||||
// Upload file
|
||||
const uploadIDs = new Map<string, {createAt: Date, deleteAt: Date, uploading: boolean, repositoryID: string, filename: string}>();
|
||||
const uploadRoute = express.Router();
|
||||
app.use("/upload", uploadRoute);
|
||||
uploadRoute.get("/", ({res}) => res.json({available: true}));
|
||||
uploadRoute.use(expressRate({
|
||||
skipSuccessfulRequests: true,
|
||||
windowMs: 1000 * 60 * 40,
|
||||
max: 1000,
|
||||
})).post("/", async ({body, headers: { authorization }}, res) => {
|
||||
if (!authorization) return res.status(401).json({error: "Require authorization/Authorization header"});
|
||||
else if (!(authorization.startsWith("Bearer "))) return res.status(401).json({error: "Invalid authorization schema"});
|
||||
else if (!(await packageManeger.userAs(authorization.replace("Bearer", "").trim()))) return res.status(401).json({error: "Invalid token!"});
|
||||
|
||||
if (!body) return res.status(400).json({error: "Required JSON or YAML to set up upload"});
|
||||
const { repositoryID, control } = body as {repositoryID: string, control: Debian.debianControl};
|
||||
if (!repositoryID) return res.status(400).json({error: "Required repository ID"});
|
||||
if (!control) return res.status(400).json({error: "Required debian control JSON"});
|
||||
const repo = packageManeger.getRepository(repositoryID).get(repositoryID);
|
||||
if (!repo.enableUpload) return res.status(401).json({message: "This repository not support upload or not setup to Upload files!"});
|
||||
let reqID: string;
|
||||
while (true) if (!(uploadIDs.has(reqID = crypto.randomBytes(12).toString("hex")))) break;
|
||||
const { Package: packageName, Architecture, Version } = control;
|
||||
const createAt = new Date(), deleteAt = new Date(createAt.getTime() + (1000 * 60 * 5));
|
||||
setTimeout(() => {if (uploadIDs.has(reqID)) uploadIDs.delete(reqID);}, createAt.getTime() - deleteAt.getTime())
|
||||
uploadIDs.set(reqID, {
|
||||
createAt, deleteAt,
|
||||
repositoryID,
|
||||
uploading: false,
|
||||
filename: `${packageName}_${Architecture}_${Version}.deb`,
|
||||
});
|
||||
return res.status(201).json({
|
||||
repositoryType: repo.type,
|
||||
uploadID: reqID,
|
||||
config: uploadIDs.get(reqID),
|
||||
});
|
||||
}).put("/:uploadID", async (req, res) => {
  // Receives the actual .deb payload for a previously created upload session.
  // The session must exist, be unused, and the request must be a sized octet stream.
  if (!(uploadIDs.has(req.params.uploadID))) return res.status(401).json({error: "Create uploadID fist!"});
  if (uploadIDs.get(req.params.uploadID).uploading) return res.status(401).json({error: "Create new uploadID, this in use"});
  // NOTE(review): req.headers["content-type"] can be undefined, which would throw here — confirm a guard is not needed.
  else if (!(req.headers["content-type"].includes("application/octet-stream"))) return res.status(400).json({error: "Send octet stream file"});
  else if (!(req.headers["content-length"])) return res.status(422).json({error: "Required file size"});
  else if (Number(req.headers["content-length"]) < 10) return res.status(422).json({error: "The file too small!"});
  // Mark the session as in-use so a concurrent PUT with the same ID is rejected above.
  uploadIDs.get(req.params.uploadID).uploading = true;
  let { repositoryID, filename } = uploadIDs.get(req.params.uploadID);

  try {
    const up = await packageManeger.getRepository(repositoryID).uploadFile(repositoryID);
    // NOTE(review): when ?tagName is absent this is undefined.toString() and throws — verify callers always send it.
    const tagName = (Array.isArray(req.query.tagName) ? req.query.tagName.at(0).toString() : req.query.tagName.toString());
    // Dispatch on whichever upload backend the repository exposes; the request
    // body is piped straight into the backend's write stream in every branch.
    if (up.githubUpload) {
      if (!tagName) res.setHeader("warning", "Using latest github release tag!");
      await streamPromise.finished(req.pipe(await up.githubUpload(filename, Number(req.headers["content-length"]), tagName)));
      return res.status(201).json({
        type: "Github release"
      });
    } else if (up.gdriveUpload) {
      const id = (Array.isArray(req.query.id) ? req.query.id.at(0).toString() : req.query.id.toString());
      await streamPromise.finished(req.pipe(await up.gdriveUpload(filename, id)));
      return res.status(201).json({
        type: "Google driver"
      });
    } else if (up.ociUpload) {
      // Optional ?path= prefixes the object key inside the bucket.
      if (typeof req.query.path === "string") filename = path.posix.resolve("/", req.query.path, filename);
      await streamPromise.finished(req.pipe(await up.ociUpload(filename)));
      return res.status(201).json({
        type: "Oracle cloud bucket",
        filename
      });
    } else if (up.dockerUpload) {
      const tar = await up.dockerUpload({
        os: "linux",
        architecture: req.query.arch||"generic" as any,
      });
      await streamPromise.finished(req.pipe(tar.addEntry({name: filename, size: Number(req.headers["content-length"])})));
      return res.status(201).json({
        // NOTE(review): label looks copy-pasted from the OCI branch — presumably should say "Docker image"; confirm before changing API output.
        type: "Oracle cloud bucket",
        image: await tar.finalize(tagName),
      });
    }
    // No backend matched: report upstream failure (502).
    return res.status(502).json({
      message: "Sorry, our error was caught"
    });
  } finally {
    // Sessions are single-shot: drop the ID whether the upload succeeded or not.
    uploadIDs.delete(req.params.uploadID);
  }
});
|
||||
|
||||
// Catch-all: any route not registered above answers with a JSON 404.
app.all("*", ({res}) => res.status(404).json({message: "Page not exists"}));
// Final error middleware: log the error, surface its message as a 400 JSON
// response, then start the HTTP server on the configured port.
app.use((err, _req, res, _next) => {
  console.error(err);
  return res.status(400).json({error: err?.message || String(err)});
}).listen(packageManeger.getPortListen(), function () {
  // address() returns an object for TCP listeners, a string for pipes/sockets.
  const address = this.address();
  console.log("Port Listen on %O", typeof address === "object" ? address.port : address);
});
|
||||
}).parseAsync().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(-1);
|
||||
});
|
43
src/log.ts
43
src/log.ts
@ -1,43 +0,0 @@
|
||||
import { formatWithOptions, InspectOptions } from "node:util";
|
||||
import cluster from "node:cluster";
|
||||
import expressLayer from "express/lib/router/layer.js";
|
||||
|
||||
// Patch promise handler to express 4.x
|
||||
expressLayer.prototype.handle_request = async function handle_request_promised(...args) {
|
||||
var fn = this.handle;
|
||||
if (fn.length > 3) return args.at(-1)();
|
||||
await Promise.resolve().then(() => fn.call(this, ...args)).catch(args.at(-1));
|
||||
}
|
||||
|
||||
// Set default custom log to Cluster workers
|
||||
if (cluster.isWorker) {
|
||||
const { log, error, debug, info, warn } = console;
|
||||
const { id } = cluster.worker ?? {};
|
||||
const defaultOptions: InspectOptions = {
|
||||
colors: true,
|
||||
showHidden: false,
|
||||
depth: null
|
||||
};
|
||||
|
||||
console.clear = console.clear ?? function () {console.warn("cannot clear tty");}
|
||||
|
||||
console.log = function(...args) {
|
||||
log("[LOG%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
|
||||
}
|
||||
|
||||
console.error = function(...args) {
|
||||
error("[ERROR%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
|
||||
}
|
||||
|
||||
console.debug = function(...args) {
|
||||
debug("[DEBUG%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
|
||||
}
|
||||
|
||||
console.info = function(...args) {
|
||||
info("[INFO%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
|
||||
}
|
||||
|
||||
console.warn = function(...args) {
|
||||
warn("[WARNING%s]: %s", id ? ` Cluster ${id}` : "", formatWithOptions(defaultOptions, ...args));
|
||||
}
|
||||
}
|
462
src/packages.ts
462
src/packages.ts
@ -1,462 +0,0 @@
|
||||
import { aptStreamConfig, configJSON, repositorySource } from "./config.js";
|
||||
import { decompressStream, compressStream } from "@sirherobrine23/decompress";
|
||||
import { googleDriver, oracleBucket } from "@sirherobrine23/cloud";
|
||||
import { extendsCrypto, extendsFS } from "@sirherobrine23/extends";
|
||||
import { apt, dpkg } from "@sirherobrine23/dpkg";
|
||||
import { tmpdir } from "node:os";
|
||||
import { format } from "node:util";
|
||||
import oldFs, { promises as fs } from "node:fs";
|
||||
import coreHTTP, { Github } from "@sirherobrine23/http";
|
||||
import streamPromise, { finished } from "node:stream/promises";
|
||||
import dockerRegistry from "@sirherobrine23/docker-registry";
|
||||
import mongoDB from "mongodb";
|
||||
import openpgp from "openpgp";
|
||||
import stream from "node:stream";
|
||||
import crypto from "node:crypto";
|
||||
import path from "node:path";
|
||||
|
||||
/**
 * Shape of one package document in the MongoDB "packages" collection.
 */
export interface dbStorage {
  // ID of the repository source this package was registered from.
  repositoryID: string;
  // Backend-specific data needed to re-fetch the .deb later
  // (e.g. a URL, bucket path, drive file id, or docker layer ref).
  restoreFile: any;
  // Parsed debian control file for the package.
  controlFile: dpkg.debianControl;
}
|
||||
|
||||
/**
 * Shape of one user document in the MongoDB "auth" collection.
 */
export interface userAuth {
  // When the user record was first created.
  createAt: Date;
  username: string;
  // API tokens issued to this user.
  token: string[];
}
|
||||
|
||||
export default async function main(initConfig: string|configJSON|aptStreamConfig) {
|
||||
return new Promise<packageManeger>((done, reject) => {
|
||||
const pkg = new packageManeger(initConfig, (err) => {
|
||||
if (err) return reject(err);
|
||||
return done(pkg);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Central package store: wraps the apt-stream config and a MongoDB backend
 * holding registered packages ("packages") and API users ("auth"), and knows
 * how to index packages from every supported source type and how to emit
 * apt repository metadata (Packages, Release, InRelease).
 */
export class packageManeger extends aptStreamConfig {
  #client: mongoDB.MongoClient;
  #collection: mongoDB.Collection<dbStorage>;
  #authCollection: mongoDB.Collection<userAuth>;
  // NOTE(review): close() is async but does not await client.close() — callers cannot know when the connection is really gone.
  async close() {this.#client.close()}
  /**
   * Loads the config, then connects to MongoDB asynchronously; the outcome is
   * reported through connectionCallback (defaults to emitting a warning).
   */
  constructor(initConfig: string|configJSON|aptStreamConfig, connectionCallback?: (err?: any) => void) {
    connectionCallback ||= (err) => {if(err) process.emit("warning", err);}
    super(initConfig);
    // Connection happens in the background; the instance is usable only after
    // connectionCallback fires without error.
    (async () => {
      const database = this.getDatabase();
      const mongoClient = this.#client = await (new mongoDB.MongoClient(database.url)).connect();
      mongoClient.on("error", err => console.error(err));
      this.#authCollection = mongoClient.db(database.databaseName || "aptStream").collection<userAuth>("auth");
      this.#collection = mongoClient.db(database.databaseName || "aptStream").collection<dbStorage>("packages");
    })().then(() => connectionCallback(), err => connectionCallback(err));
  }

  // Best-effort connection stats read from the driver's private "topology"
  // field — NOTE(review): relies on driver internals and may break on upgrade.
  getClientInfo() {
    const connection = this.#client["topology"];
    return {
      connections: {
        max: Number(connection.s.options.maxConnecting),
        current: Number(connection.client.s.activeSessions?.size),
      }
    }
  }
  /**
   * Mints a new unique token for username, creating the user doc if missing.
   */
  async createToken(username: string) {
    let token: string;
    // Re-roll until the random token is not already taken.
    while (true) {
      token = crypto.randomBytes(8).toString("hex");
      if (!(await this.#authCollection.findOne({token}))) break;
    }
    if (!(await this.#authCollection.findOne({username}))) await this.#authCollection.insertOne({username, createAt: new Date(), token: []});
    // NOTE(review): $inc with a string on an array field looks wrong — presumably $push was intended; confirm tokens are actually persisted.
    await this.#authCollection.findOneAndUpdate({username}, {$inc: {token: token as never}});
    return token;
  }

  // True when some user owns this token.
  // NOTE(review): {token: [String(token)]} matches only docs whose token array equals exactly [token]; an element match ({token: String(token)}) was probably intended — verify.
  async userAs(token: string) {
    return !!(await this.#authCollection.findOne({token: [String(token)]}));
  }

  // Raw package query passthrough.
  async pkgQuery(query: mongoDB.Filter<dbStorage>) {
    return this.#collection.find(query).toArray();
  }

  // Total number of stored packages (via collection stats).
  async packagesCount() {
    return (await this.#collection.stats()).count;
  }

  // Distinct MD5 hashes of all stored packages.
  async getPackagesHash() {
    return this.#collection.distinct("controlFile.MD5sum");
  }

  /**
   * Package count and source count for one configured repository.
   * Throws when the repository name resolves to no sources.
   */
  async repoInfo(repositoryName: string) {
    const repositorys = this.getRepository(repositoryName).getAllRepositorys();
    if (!repositorys.length) throw new Error("Repository or Component name not exists!");
    return {
      packagesCount: (await Promise.all(repositorys.map(async ({repositoryID}) => this.#collection.countDocuments({repositoryID})))).reduce((acc, count) => acc+count, 0),
      sources: repositorys.length,
    };
  }

  /**
   * Streams an apt "Packages" index for one component/arch. Without a callback
   * it returns hash/size entries for the plain, gz and xz variants (as enabled
   * by config); with options.callback the compressed stream is handed to it.
   */
  async createPackage(repositoryName: string, componentName: string, Arch: string, appRoot: string = "", options?: {compress?: "gz"|"xz", callback: (str: stream.Readable) => void}): Promise<{filePath: string; fileSize: number; sha512: string; sha256: string; sha1: string; md5: string;}[]> {
    const repositorys = this.getRepository(repositoryName).getAllRepositorys().filter(pkg => pkg.componentName === componentName);
    if (!repositorys.length) throw new Error("Repository or Component name not exists!");

    // str is fed below with one control stanza per package, then piped into
    // one compressor per requested output variant.
    const str = new stream.Readable({autoDestroy: true, emitClose: true, read(_s){}});
    const gg: (Promise<{filePath: string; fileSize: number; sha512: string; sha256: string; sha1: string; md5: string;}>)[] = [];
    if (typeof options?.callback === "function") (async () => options.callback(str.pipe(compressStream(options.compress === "gz" ? "gzip" : options.compress === "xz" ? "xz" : "passThrough"))))().catch(err => str.emit("error", err));
    else {
      // Hash one compressed variant of the Packages stream.
      async function getHash(compress?: "gz"|"xz") {
        const com = stream.Readable.from(str.pipe(compressStream(compress === "gz" ? "gzip" : compress === "xz" ? "xz" : "passThrough")));
        return extendsCrypto.createHashAsync(com).then(({hash, byteLength}) => ({
          filePath: path.posix.join(componentName, "binary-"+Arch, "Packages"+(compress === "gz" ? ".gz" : compress === "xz" ? ".xz" : "")),
          fileSize: byteLength,
          sha512: hash.sha512,
          sha256: hash.sha256,
          sha1: hash.sha1,
          md5: hash.md5,
        }));
      }
      gg.push(getHash());
      if (this.getCompressRelease("gzip")) gg.push(getHash("gz"));
      if (this.getCompressRelease("xz")) gg.push(getHash("xz"));
    }
    // Producer: page through matching packages and push control stanzas,
    // separated by blank lines as apt expects.
    (async () => {
      let breakLine = false;
      for (const repo of repositorys) {
        let pkgs: mongoDB.WithId<dbStorage>[], page = 0;
        while ((pkgs = await this.#collection.find({repositoryID: repo.repositoryID, "controlFile.Architecture": Arch}).skip(page).limit(2500).toArray()).length > 0) {
          page += pkgs.length;
          for (const {controlFile: pkg} of pkgs) {
            let pkgHash: string;
            // Packages without an MD5 cannot be served from the pool; skip them.
            if (!(pkgHash = pkg.MD5sum)) continue;
            if (breakLine) str.push("\n\n"); else breakLine = true;
            str.push(dpkg.createControl({
              ...pkg,
              // Pool path is derived from the package hash, rooted at appRoot.
              Filename: path.posix.join("/", appRoot, "pool", `${pkgHash}.deb`).slice(1),
            }));
          }
        }
      }
      str.push(null);
    })().catch(err => str.emit("error", err));
    return Promise.all(gg);
  }

  /**
   * Builds the apt Release data for a repository: collects Packages-index
   * hashes for every component/arch and exposes toJSON/toString/inRelease
   * (the latter PGP-signs the release with the configured key).
   */
  async createRelease(repositoryName: string, appRoot: string) {
    const source = this.getRepository(repositoryName);
    const repositorys = source.getAllRepositorys();
    const releaseDate = (new Date()).toUTCString();
    const Architectures = await this.#collection.distinct("controlFile.Architecture", {repositoryID: {$in: repositorys.map(a => a.repositoryID)}});
    const Components = Array.from(new Set(repositorys.map(rpm => rpm.componentName)));
    const MD5Sum = new Set<{hash: string, size: number, path: string}>();
    const SHA1 = new Set<{hash: string, size: number, path: string}>();
    const SHA256 = new Set<{hash: string, size: number, path: string}>();
    const SHA512 = new Set<{hash: string, size: number, path: string}>();
    // Generate every component/arch Packages index and record its digests.
    await Promise.all(Architectures.map(async arch => Promise.all(Components.map(async comp => this.createPackage(repositoryName, comp, arch, appRoot).then(res => res.forEach(({fileSize, filePath, md5, sha1, sha256, sha512}) => {
      MD5Sum.add({size: fileSize, path: filePath, hash: md5});
      SHA1.add({size: fileSize, path: filePath, hash: sha1});
      SHA256.add({size: fileSize, path: filePath, hash: sha256});
      SHA512.add({size: fileSize, path: filePath, hash: sha512});
    }), err => console.log(err))))));
    // Release file as a plain object.
    const toJSON = () => {
      if ((!Architectures.length) && (!Components.length)) throw new Error("Invalid config repository or not loaded to database!");
      const data = {
        Date: releaseDate,
        acquireByHash: false,
        Codename: source.getCodename(),
        Suite: source.getSuite(),
        Origin: source.getOrigin(),
        Label: source.getLabel(),
        Description: source.getDescription(),
        Architectures,
        Components,
        MD5Sum: Array.from(MD5Sum.values()).sort((a, b) => b.size - a.size),
        SHA1: Array.from(SHA1.values()).sort((a, b) => b.size - a.size),
        SHA256: Array.from(SHA256.values()).sort((a, b) => b.size - a.size),
        SHA512: Array.from(SHA512.values()).sort((a, b) => b.size - a.size),
      };
      if (!data.Architectures.length) throw new Error("Require one packages loaded to database!");
      return data;
    }

    // Release file in the textual format apt parses.
    const toString = () => {
      const reljson = toJSON();
      let configString: string[] = [
        "Date: "+(reljson.Date),
        "Acquire-By-Hash: no",
        "Architectures: "+(reljson.Architectures.join(" ")),
        "Components: "+(reljson.Components.join(" ")),
      ];

      if (reljson.Codename) configString.push(`Codename: ${reljson.Codename}`);
      if (reljson.Suite) configString.push(`Suite: ${reljson.Suite}`);
      if (reljson.Origin) configString.push(`Origin: ${reljson.Origin}`);
      if (reljson.Label) configString.push(`Label: ${reljson.Label}`);
      if (reljson.Description) configString.push(`Description: ${reljson.Description}`);

      // Emit one "<hash> <right-aligned size> <path>" line per index file.
      const insertHash = (name: string, hashes: typeof reljson.MD5Sum) => {
        configString.push(name+":");
        const sizeLength = hashes.at(0).size.toString().length+2;
        for (const data of hashes) configString.push((" "+data.hash + " "+(Array(Math.max(1, Math.abs(sizeLength - (data.size.toString().length)))).fill("").join(" ")+(data.size.toString()))+" "+data.path))
      }
      if (reljson.MD5Sum.length > 0) insertHash("MD5Sum", reljson.MD5Sum);
      if (reljson.SHA1.length > 0) insertHash("SHA1", reljson.SHA1);
      if (reljson.SHA256.length > 0) insertHash("SHA256", reljson.SHA256);
      if (reljson.SHA512.length > 0) insertHash("SHA512", reljson.SHA512);

      return configString.join("\n");
    }

    // PGP-signed Release: "sign" yields a cleartext-signed InRelease,
    // "clearMessage" a detached armored signature body.
    const inRelease = async (type: "sign"|"clearMessage" = "sign"): Promise<string> => {
      if (!(source.getCodename()||source.getSuite())) throw new Error("Required Suite or Codename to create InRelease file");
      else if (!(MD5Sum.size||SHA256.size)) throw new Error("Require MD5 or SHA256 to create InRelease file");
      const gpgSign = this.getPGPKey();
      const privateKey = gpgSign.gpgPassphrase ? await openpgp.decryptKey({privateKey: await openpgp.readPrivateKey({ armoredKey: gpgSign.privateKey.keyContent }), passphrase: gpgSign.gpgPassphrase}) : await openpgp.readPrivateKey({ armoredKey: gpgSign.privateKey.keyContent });
      const text = toString();
      if (type === "clearMessage") return Buffer.from(await openpgp.sign({
        signingKeys: privateKey,
        format: "armored",
        message: await openpgp.createMessage({text})
      }) as any).toString("utf8");
      return openpgp.sign({
        signingKeys: privateKey,
        format: "armored",
        message: await openpgp.createCleartextMessage({text})
      });
    }
    return {
      toJSON,
      toString,
      inRelease
    }
  }

  /**
   * Opens a readable stream for a stored package, fetching it from whichever
   * backend it came from, and tees newly fetched bytes into an on-disk cache
   * (keyed by the package's first available hash) when storage is configured.
   */
  async getPackageStream(packageTarget: dbStorage) {
    const source = this.getRepository(packageTarget.repositoryID).get(packageTarget.repositoryID);
    if (!source) throw new Error("Package Source no more avaible please sync packages!");
    let saveCache: string;
    if (await this.getDataStorage()) {
      const cacheFolder = path.join(await this.getDataStorage(), "deb_cache");
      if (!(await extendsFS.exists(cacheFolder))) await fs.mkdir(cacheFolder, {recursive: true});
      const { MD5sum, SHA1, SHA256, SHA512 } = packageTarget.controlFile;
      // Serve straight from cache when any hash-named file exists; otherwise
      // remember the first candidate path to write into after fetching.
      for (const hash of ([MD5sum, SHA1, SHA256, SHA512])) {
        if (!hash) continue
        const filePath = path.join(cacheFolder, `${hash}.deb`);
        if (await extendsFS.exists(filePath)) return oldFs.createReadStream(filePath);
        else if (!saveCache) saveCache = filePath;
      }
    }

    if (source.type === "http") {
      const { url, auth: { header: headers, query } } = source;
      return coreHTTP.streamRequest(url, {headers, query}).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "mirror") {
      const { debUrl } = packageTarget.restoreFile;
      return coreHTTP.streamRequest(debUrl).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "github") {
      const { token } = source, { url } = packageTarget.restoreFile;
      return coreHTTP.streamRequest(url, {headers: token ? {"Authorization": "token "+token} : {}}).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "oracleBucket") {
      const { authConfig } = source, { restoreFile: { path } } = packageTarget;
      const bucket = await oracleBucket.oracleBucket(authConfig);
      return bucket.getFileStream(path).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "googleDriver") {
      const { clientId, clientSecret, clientToken } = source, { restoreFile: { id } } = packageTarget;
      const gdrive = await googleDriver.GoogleDriver({authConfig: {clientID: clientId, clientSecret, token: clientToken, redirectURL: "http://localhost", authUrlCallback(){throw new Error("Set up fist")}, tokenCallback() {}}});
      return gdrive.getFileStream(id).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    } else if (source.type === "docker") {
      // Extract the referenced layer and resolve with the matching tar entry's stream.
      const { image, auth } = source, { ref, path: debPath } = packageTarget.restoreFile;
      const registry = new dockerRegistry.v2(image, auth);
      return new Promise<stream.Readable>((done, reject) => registry.extractLayer(ref).then(tar => tar.on("error", reject).on("File", entry => entry.path === debPath ? done(entry.stream) : null))).then(src => {
        if (saveCache) src.pipe(oldFs.createWriteStream(saveCache));
        return stream.Readable.from(src);
      });
    }
    throw new Error("Check package type");
  }

  /**
   * Inserts a package document; throws when the same
   * (repositoryID, Package, Version, Architecture) tuple already exists.
   */
  async addPackage(repositoryID: string, control: dpkg.debianControl, restore: any): Promise<dbStorage> {
    if (Boolean(await this.#collection.findOne({
      repositoryID,
      "controlFile.Package": control.Package,
      "controlFile.Version": control.Version,
      "controlFile.Architecture": control.Architecture
    }))) {
      const { Package, Architecture, Version } = control;
      throw new Error(format("%s -> %s/%s (%s) are exists in database", repositoryID, Package, Architecture, Version));
    }
    await this.#collection.insertOne({
      repositoryID,
      restoreFile: restore,
      controlFile: control
    });
    return {
      repositoryID,
      restoreFile: restore,
      controlFile: control
    };
  }

  /**
   * Re-indexes every configured source and prunes documents whose repository
   * no longer exists in the config. Returns the pruned repository IDs.
   */
  async syncRepositorys(callback?: (error?: any, dbStr?: dbStorage) => void) {
    const sources = this.getRepositorys().map(({repositoryManeger}) => repositoryManeger.getAllRepositorys()).flat(2);
    const toDelete = (await this.#collection.distinct("repositoryID")).filter(key => !sources.find(d => d.repositoryID === key));
    // NOTE(review): {repositoryID: toDelete} passes the whole array as an equality match — presumably {$in: toDelete} was intended; verify stale docs are actually removed.
    if (toDelete.length > 0) await this.#collection.deleteMany({repositoryID: toDelete});
    for (const repo of sources) await this.registerSource(repo.repositoryID, repo, callback);
    return toDelete;
  }

  /**
   * Indexes the packages of one source into the database. Each discovered
   * .deb is parsed and stored via addPackage; per-package errors are reported
   * through callback(err, null) rather than aborting the whole sync.
   */
  async registerSource(repositoryID: string, target: repositorySource, callback?: (error?: any, dbStr?: dbStorage) => void) {
    callback ??= (_void1, _void2) => {};
    if (target.type === "http") {
      // Single .deb behind a URL.
      try {
        const control = (await dpkg.parsePackage(await coreHTTP.streamRequest(target.url, {headers: target.auth?.header, query: target.auth?.query}))).controlFile;
        callback(null, await this.addPackage(repositoryID, control, {}));
      } catch (err) {
        callback(err, null);
      }
    } else if (target.type === "oracleBucket") {
      // Every configured path, or all *.deb objects when no paths are given.
      const { authConfig, path = [] } = target;
      const bucket = await oracleBucket.oracleBucket(authConfig);
      try {
        if (path.length === 0) path.push(...((await bucket.listFiles()).filter(k => k.name.endsWith(".deb")).map(({name}) => name)));
        for (const file of path) {
          const control = (await dpkg.parsePackage(await bucket.getFileStream(file))).controlFile;
          callback(null, await this.addPackage(repositoryID, control, {path: file}));
        }
      } catch (err) {
        callback(err, null);
      }
    } else if (target.type === "googleDriver") {
      // Every configured drive file id, or all *.deb files when none given.
      const { clientId, clientSecret, clientToken, gIDs = [] } = target;
      const gdrive = await googleDriver.GoogleDriver({authConfig: {clientID: clientId, clientSecret, token: clientToken, redirectURL: "http://localhost", authUrlCallback(){throw new Error("Set up fist")}, tokenCallback() {}}});
      if (gIDs.length === 0) gIDs.push(...((await gdrive.listFiles()).filter(rel => rel.name.endsWith(".deb")).map(({id}) => id)));
      for (const file of gIDs) {
        try {
          const control = (await dpkg.parsePackage(await gdrive.getFileStream(file))).controlFile;
          callback(null, await this.addPackage(repositoryID, control, {id: file}));
        } catch (err) {
          callback(err, null);
        }
      }
    } else if (target.type === "github") {
      const { owner, repository, token } = target;
      const gh = await Github.repositoryManeger(owner, repository, {token});
      if (target.subType === "branch") {
        // Scan one branch's tree for .deb files (>10 bytes) and index them by raw URL.
        const { branch = (await gh.repository.listBranchs()).at(0)?.name ?? "main" } = target;
        for (const { path: filePath } of (await gh.git.getTree(branch)).tree.filter(file => file.type === "tree" ? false : (file.size > 10) && file.path.endsWith(".deb"))) {
          try {
            const rawURL = new URL(path.posix.join(owner, repository, branch, filePath), "https://raw.githubusercontent.com");
            const control = (await dpkg.parsePackage(gh.git.getRawFile(branch, filePath))).controlFile;
            callback(null, await this.addPackage(repositoryID, control, {url: rawURL.toString()}));
          } catch (err) {
            callback(err, null);
          }
        }
      } else {
        // Scan release assets; all tags when none configured.
        const { tag = [] } = target;
        if (!tag.length) tag.push(...((await gh.release.getRelease()).map(d => d.tag_name)));
        for (const tagName of tag) {
          try {
            const assets = (await gh.release.getRelease(tagName)).assets.filter(({name}) => name.endsWith(".deb"));
            for (const asset of assets) {
              const control = (await dpkg.parsePackage(await coreHTTP.streamRequest(asset.browser_download_url, {headers: token ? {Authorization: `token ${token}`} : {}}))).controlFile;
              callback(null, await this.addPackage(repositoryID, control, {url: asset.browser_download_url}));
            }
          } catch (err) {
            callback(err, null);
          }
        }
      }
    } else if (target.type === "docker") {
      const { image, auth, tags = [] } = target;
      const registry = new dockerRegistry.v2(image, auth);
      // No tags configured: prefer the pinned digest/tag, else the 6 newest tags.
      if (tags.length === 0) {
        const { sha256, tag } = registry.image;
        if (sha256) tags.push(sha256);
        else if (tag) tags.push(tag);
        else tags.push(...((await registry.getTags()).reverse().slice(0, 6)));
      }
      for (const tag of tags) {
        const manifestManeger = new dockerRegistry.Utils.Manifest(await registry.getManifets(tag), registry);
        // Walk every layer of the current manifest, indexing .deb tar entries.
        const addPckage = async () => {
          for (const layer of manifestManeger.getLayers()) {
            const blob = await registry.extractLayer(layer.digest);
            blob.on("error", err => callback(err, null)).on("entry", async (entry, str, next) => {
              next();
              if (!(entry.name.endsWith(".deb"))) return null;
              try {
                const control = (await dpkg.parsePackage(stream.Readable.from(str))).controlFile;
                callback(null, await this.addPackage(repositoryID, control, {ref: layer.digest, path: entry.path}));
              } catch (err) {callback(err, null);}
            });
            await finished(blob);
          }
        }
        // Multi-arch manifests are walked once per platform.
        if (manifestManeger.multiArch) {
          for (const platform of manifestManeger.platforms) {
            await manifestManeger.setPlatform(platform as any);
            await addPckage();
          }
        } else await addPckage();
      }
    } else if (target.type === "mirror") {
      // Mirror an existing apt repository: parse its (In)Release, then each
      // component/arch Packages index, and register every listed package.
      const { config = [] } = target;
      // Reads an inclusive byte range of a file into a Buffer.
      const readFile = (path: string, start: number, end: number) => new Promise<Buffer>((done, reject) => {
        let buf: Buffer[] = [];
        oldFs.createReadStream(path, { start, end }).on("error", reject).on("data", (data: Buffer) => buf.push(data)).on("close", () => {done(Buffer.concat(buf)); buf = null;});
      });
      for (const aptSrc of config.filter(d => d.type === "packages")) {
        const main_url = new URL(aptSrc.src);
        const distMain = new URL(path.posix.join(main_url.pathname, "dists", aptSrc.distname), main_url);
        // Prefer the signed InRelease, falling back to the plain Release file.
        const release = apt.parseRelease(await coreHTTP.bufferRequestBody(distMain.toString()+"/InRelease").then(async data => (await openpgp.readCleartextMessage({cleartextMessage: data.toString()})).getText()).catch(() => coreHTTP.bufferRequestBody(distMain.toString()+"/Release").then(data => data.toString())));
        for (const Component of release.Components) for (const Arch of release.Architectures.filter(arch => arch !== "all")) {
          // Try the plain, .gz and .xz index in order; stop at the first that works.
          for (const ext of (["", ".gz", ".xz"])) {
            const mainReq = new URL(path.posix.join(distMain.pathname, Component, `binary-${Arch}`, `Packages${ext}`), distMain);
            const tmpFile = (path.join(tmpdir(), Buffer.from(mainReq.toString(), "utf8").toString("hex")))+".package";
            try {
              await streamPromise.finished((await coreHTTP.streamRequest(mainReq)).pipe(decompressStream()).pipe(oldFs.createWriteStream(tmpFile)));
              // Locate stanza boundaries (blank lines = "\n\n") by byte offset.
              const packagesLocation: {start: number, end: number}[] = [];
              let start: number = 0, currentChuck = 0;
              await streamPromise.finished(oldFs.createReadStream(tmpFile).on("data", (chunk: Buffer) => {
                // NOTE(review): chunk[i - 1] is undefined at i === 0, so a "\n\n" split across two chunks is missed — confirm whether stanzas can be silently merged.
                for (let i = 0; i < chunk.length; i++) if ((chunk[i - 1] === 0x0A) && (chunk[i] === 0x0A)) {
                  packagesLocation.push({
                    start,
                    end: i + currentChuck,
                  });
                  start = (i + currentChuck)+1;
                }
                currentChuck += Buffer.byteLength(chunk, "binary");
              }));
              for (const { start, end } of packagesLocation) {
                const control = dpkg.parseControl(await readFile(tmpFile, start, end));
                callback(null, await this.addPackage(repositoryID, control, {
                  debUrl: (new URL(path.posix.join(main_url.pathname, control.Filename), main_url)).toString()
                }));
              }
              await fs.rm(tmpFile);
              break;
            } catch (err) {
              callback(err, null);
            }
          }
        }
      }
    }
  }
}
|
@ -15,13 +15,13 @@
|
||||
"allowJs": true,
|
||||
"composite": true,
|
||||
"lib": [
|
||||
"ESNext",
|
||||
"ES7"
|
||||
"ESNext"
|
||||
]
|
||||
},
|
||||
"exclude": [
|
||||
"**/*.test.ts",
|
||||
"**/test*/**",
|
||||
"**/scripts/**",
|
||||
"node_modules/"
|
||||
],
|
||||
"ts-node": {
|
||||
|
Loading…
x
Reference in New Issue
Block a user